[exo-jcr-commits] exo-jcr SVN: r1058 - in jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query: lucene and 1 other directory.

do-not-reply at jboss.org
Tue Dec 15 08:35:49 EST 2009


Author: nzamosenchuk
Date: 2009-12-15 08:35:49 -0500 (Tue, 15 Dec 2009)
New Revision: 1058

Modified:
   jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/jbosscache/JbossCacheIndexChangesFilter.java
   jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/AbstractIndex.java
   jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java
Log:
EXOJCR-291: PushState disabled; added a check for an index directory removed on the other side; added index-reader reopening when the reader is no longer current.
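
The reader-reopening part of the change boils down to roughly the following sketch against the plain Lucene API. The ReaderHolder name and the explicit close of the stale reader are illustrative only; the actual code below wraps the reader in a CommittableIndexReader inside the synchronized getIndexReader() method.

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;

class ReaderHolder
{
   private final Directory directory;

   private IndexReader reader;

   ReaderHolder(Directory directory)
   {
      this.directory = directory;
   }

   // Reuse the cached reader only while it still reflects the latest commit
   // in the Directory; otherwise drop it and open a fresh one.
   synchronized IndexReader getReader() throws IOException
   {
      if (reader == null || !reader.isCurrent())
      {
         if (reader != null)
         {
            reader.close();
         }
         reader = IndexReader.open(directory);
      }
      return reader;
   }
}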

Modified: jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/jbosscache/JbossCacheIndexChangesFilter.java
===================================================================
--- jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/jbosscache/JbossCacheIndexChangesFilter.java	2009-12-15 13:25:52 UTC (rev 1057)
+++ jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/jbosscache/JbossCacheIndexChangesFilter.java	2009-12-15 13:35:49 UTC (rev 1058)
@@ -93,7 +93,7 @@
       singletonStoreConfig.setSingletonStoreClass(IndexerSingletonStoreCacheLoader.class.getName());
       //singletonStoreConfig.setSingletonStoreClass(SingletonStoreCacheLoader.class.getName());
       Properties singletonStoreProperties = new Properties();
-      singletonStoreProperties.setProperty("pushStateWhenCoordinator", "true");
+      singletonStoreProperties.setProperty("pushStateWhenCoordinator", "false");
       singletonStoreProperties.setProperty("pushStateWhenCoordinatorTimeout", "10000");
       singletonStoreConfig.setProperties(singletonStoreProperties);
       singletonStoreConfig.setSingletonStoreEnabled(true);
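
As far as the JBoss Cache singleton store is concerned, pushStateWhenCoordinator is understood to control whether a node that becomes the coordinator pushes its in-memory state into the cache loader; with this revision that push is switched off, and the pushStateWhenCoordinatorTimeout property that follows presumably has no effect while the push is disabled.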

Modified: jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/AbstractIndex.java
===================================================================
--- jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/AbstractIndex.java	2009-12-15 13:25:52 UTC (rev 1057)
+++ jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/AbstractIndex.java	2009-12-15 13:35:49 UTC (rev 1058)
@@ -49,575 +49,682 @@
  * Concurrent access by <b>one</b> modifying thread and multiple read-only
  * threads is safe!
  */
-abstract class AbstractIndex {
+abstract class AbstractIndex
+{
 
-    /** The logger instance for this class */
-    private static final Logger log = LoggerFactory.getLogger(AbstractIndex.class);
+   /** The logger instance for this class */
+   private static final Logger log = LoggerFactory.getLogger(AbstractIndex.class);
 
-    /** PrintStream that pipes all calls to println(String) into log.info() */
-    private static final LoggingPrintStream STREAM_LOGGER = new LoggingPrintStream();
+   /** PrintStream that pipes all calls to println(String) into log.info() */
+   private static final LoggingPrintStream STREAM_LOGGER = new LoggingPrintStream();
 
-    /** Executor with a pool size equal to the number of available processors */
-    private static final DynamicPooledExecutor EXECUTOR = new DynamicPooledExecutor();
+   /** Executor with a pool size equal to the number of available processors */
+   private static final DynamicPooledExecutor EXECUTOR = new DynamicPooledExecutor();
 
-    /** The currently set IndexWriter or <code>null</code> if none is set */
-    private IndexWriter indexWriter;
+   /** The currently set IndexWriter or <code>null</code> if none is set */
+   private IndexWriter indexWriter;
 
-    /** The currently set IndexReader or <code>null</code> if none is set */
-    private CommittableIndexReader indexReader;
+   /** The currently set IndexReader or <code>null</code> if none is set */
+   private CommittableIndexReader indexReader;
 
-    /** The underlying Directory where the index is stored */
-    private Directory directory;
+   /** The underlying Directory where the index is stored */
+   private Directory directory;
 
-    /** Analyzer we use to tokenize text */
-    private Analyzer analyzer;
+   /** Analyzer we use to tokenize text */
+   private Analyzer analyzer;
 
-    /** The similarity in use for indexing and searching. */
-    private final Similarity similarity;
+   /** The similarity in use for indexing and searching. */
+   private final Similarity similarity;
 
-    /** Compound file flag */
-    private boolean useCompoundFile = true;
+   /** Compound file flag */
+   private boolean useCompoundFile = true;
 
-    /** maxFieldLength config parameter */
-    private int maxFieldLength = SearchIndex.DEFAULT_MAX_FIELD_LENGTH;
+   /** maxFieldLength config parameter */
+   private int maxFieldLength = SearchIndex.DEFAULT_MAX_FIELD_LENGTH;
 
-    /** termInfosIndexDivisor config parameter */
-    private int termInfosIndexDivisor = SearchIndex.DEFAULT_TERM_INFOS_INDEX_DIVISOR;
+   /** termInfosIndexDivisor config parameter */
+   private int termInfosIndexDivisor = SearchIndex.DEFAULT_TERM_INFOS_INDEX_DIVISOR;
 
-    /**
-     * The document number cache if this index may use one.
-     */
-    private DocNumberCache cache;
+   /**
+    * The document number cache if this index may use one.
+    */
+   private DocNumberCache cache;
 
-    /** The shared IndexReader for all read-only IndexReaders */
-    private SharedIndexReader sharedReader;
+   /** The shared IndexReader for all read-only IndexReaders */
+   private SharedIndexReader sharedReader;
 
-    /**
-     * The most recent read-only reader if there is any.
-     */
-    private ReadOnlyIndexReader readOnlyReader;
+   /**
+    * The most recent read-only reader if there is any.
+    */
+   private ReadOnlyIndexReader readOnlyReader;
 
-    /**
-     * The indexing queue.
-     */
-    private IndexingQueue indexingQueue;
+   /**
+    * The indexing queue.
+    */
+   private IndexingQueue indexingQueue;
 
-    /**
-     * Flag that indicates whether there was an index present in the directory
-     * when this AbstractIndex was created.
-     */
-    private boolean isExisting;
+   /**
+    * Flag that indicates whether there was an index present in the directory
+    * when this AbstractIndex was created.
+    */
+   private boolean isExisting;
 
-    /**
-     * Constructs an index with an <code>analyzer</code> and a
-     * <code>directory</code>.
-     *
-     * @param analyzer      the analyzer for text tokenizing.
-     * @param similarity    the similarity implementation.
-     * @param directory     the underlying directory.
-     * @param cache         the document number cache if this index should use
-     *                      one; otherwise <code>cache</code> is
-     *                      <code>null</code>.
-     * @param indexingQueue the indexing queue.
-     * @throws IOException if the index cannot be initialized.
-     */
-    AbstractIndex(Analyzer analyzer,
-                  Similarity similarity,
-                  Directory directory,
-                  DocNumberCache cache,
-                  IndexingQueue indexingQueue) throws IOException {
-        this.analyzer = analyzer;
-        this.similarity = similarity;
-        this.directory = directory;
-        this.cache = cache;
-        this.indexingQueue = indexingQueue;
-        this.isExisting = IndexReader.indexExists(directory);
+   /**
+    * Constructs an index with an <code>analyzer</code> and a
+    * <code>directory</code>.
+    *
+    * @param analyzer      the analyzer for text tokenizing.
+    * @param similarity    the similarity implementation.
+    * @param directory     the underlying directory.
+    * @param cache         the document number cache if this index should use
+    *                      one; otherwise <code>cache</code> is
+    *                      <code>null</code>.
+    * @param indexingQueue the indexing queue.
+    * @throws IOException if the index cannot be initialized.
+    */
+   AbstractIndex(Analyzer analyzer, Similarity similarity, Directory directory, DocNumberCache cache,
+      IndexingQueue indexingQueue) throws IOException
+   {
+      this.analyzer = analyzer;
+      this.similarity = similarity;
+      this.directory = directory;
+      this.cache = cache;
+      this.indexingQueue = indexingQueue;
+      this.isExisting = IndexReader.indexExists(directory);
 
-        if (!isExisting) {
-            indexWriter = new IndexWriter(directory, analyzer,
-                    IndexWriter.MaxFieldLength.LIMITED);
-            // immediately close, now that index has been created
-            indexWriter.close();
-            indexWriter = null;
-        }
-    }
+      if (!isExisting)
+      {
+         indexWriter = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+         // immediately close, now that index has been created
+         indexWriter.close();
+         indexWriter = null;
+      }
+   }
 
-    /**
-     * Default implementation returns the same instance as passed
-     * in the constructor.
-     *
-     * @return the directory instance passed in the constructor
-     */
-    Directory getDirectory() {
-        return directory;
-    }
+   /**
+    * Default implementation returns the same instance as passed
+    * in the constructor.
+    *
+    * @return the directory instance passed in the constructor
+    */
+   Directory getDirectory()
+   {
+      return directory;
+   }
 
-    /**
-     * Returns <code>true</code> if this index was openend on a directory with
-     * an existing index in it; <code>false</code> otherwise.
-     *
-     * @return <code>true</code> if there was an index present when this index
-     *          was created; <code>false</code> otherwise.
-     */
-    boolean isExisting() {
-        return isExisting;
-    }
+   /**
+    * Returns <code>true</code> if this index was opened on a directory with
+    * an existing index in it; <code>false</code> otherwise.
+    *
+    * @return <code>true</code> if there was an index present when this index
+    *          was created; <code>false</code> otherwise.
+    */
+   boolean isExisting()
+   {
+      return isExisting;
+   }
 
-    /**
-     * Adds documents to this index and invalidates the shared reader.
-     *
-     * @param docs the documents to add.
-     * @throws IOException if an error occurs while writing to the index.
-     */
-    void addDocuments(Document[] docs) throws IOException {
-        final IndexWriter writer = getIndexWriter();
-        DynamicPooledExecutor.Command[] commands =
-                new DynamicPooledExecutor.Command[docs.length];
-        for (int i = 0; i < docs.length; i++) {
-            // check if text extractor completed its work
-            final Document doc = getFinishedDocument(docs[i]);
-            // create a command for inverting the document
-            commands[i] = new DynamicPooledExecutor.Command() {
-                public Object call() throws Exception {
-                    long time = System.currentTimeMillis();
-                    writer.addDocument(doc);
-                    return new Long(System.currentTimeMillis() - time);
-                }
-            };
-        }
-        DynamicPooledExecutor.Result[] results = EXECUTOR.executeAndWait(commands);
-        invalidateSharedReader();
-        IOException ex = null;
-        for (int i = 0; i < results.length; i++) {
-            if (results[i].getException() != null) {
-                Throwable cause = results[i].getException().getCause();
-                if (ex == null) {
-                    // only throw the first exception
-                    if (cause instanceof IOException) {
-                        ex = (IOException) cause;
-                    } else {
-                        throw Util.createIOException(cause);
-                    }
-                } else {
-                    // all others are logged
-                    log.warn("Exception while inverting document", cause);
-                }
-            } else {
-                log.debug("Inverted document in {} ms", results[i].get());
+   /**
+    * Adds documents to this index and invalidates the shared reader.
+    *
+    * @param docs the documents to add.
+    * @throws IOException if an error occurs while writing to the index.
+    */
+   void addDocuments(Document[] docs) throws IOException
+   {
+      final IndexWriter writer = getIndexWriter();
+      DynamicPooledExecutor.Command[] commands = new DynamicPooledExecutor.Command[docs.length];
+      for (int i = 0; i < docs.length; i++)
+      {
+         // check if text extractor completed its work
+         final Document doc = getFinishedDocument(docs[i]);
+         // create a command for inverting the document
+         commands[i] = new DynamicPooledExecutor.Command()
+         {
+            public Object call() throws Exception
+            {
+               long time = System.currentTimeMillis();
+               writer.addDocument(doc);
+               return new Long(System.currentTimeMillis() - time);
             }
-        }
-        if (ex != null) {
-            throw ex;
-        }
-    }
+         };
+      }
+      DynamicPooledExecutor.Result[] results = EXECUTOR.executeAndWait(commands);
+      invalidateSharedReader();
+      IOException ex = null;
+      for (int i = 0; i < results.length; i++)
+      {
+         if (results[i].getException() != null)
+         {
+            Throwable cause = results[i].getException().getCause();
+            if (ex == null)
+            {
+               // only throw the first exception
+               if (cause instanceof IOException)
+               {
+                  ex = (IOException)cause;
+               }
+               else
+               {
+                  throw Util.createIOException(cause);
+               }
+            }
+            else
+            {
+               // all others are logged
+               log.warn("Exception while inverting document", cause);
+            }
+         }
+         else
+         {
+            log.debug("Inverted document in {} ms", results[i].get());
+         }
+      }
+      if (ex != null)
+      {
+         throw ex;
+      }
+   }
 
-    /**
-     * Removes the document from this index. This call will not invalidate
-     * the shared reader. If a subclass whishes to do so, it should overwrite
-     * this method and call {@link #invalidateSharedReader()}.
-     *
-     * @param idTerm the id term of the document to remove.
-     * @throws IOException if an error occurs while removing the document.
-     * @return number of documents deleted
-     */
-    int removeDocument(Term idTerm) throws IOException {
-        return getIndexReader().deleteDocuments(idTerm);
-    }
+   /**
+    * Removes the document from this index. This call will not invalidate
+    * the shared reader. If a subclass wishes to do so, it should override
+    * this method and call {@link #invalidateSharedReader()}.
+    *
+    * @param idTerm the id term of the document to remove.
+    * @throws IOException if an error occurs while removing the document.
+    * @return number of documents deleted
+    */
+   int removeDocument(Term idTerm) throws IOException
+   {
+      return getIndexReader().deleteDocuments(idTerm);
+   }
 
-    /**
-     * Returns an <code>IndexReader</code> on this index. This index reader
-     * may be used to delete documents.
-     *
-     * @return an <code>IndexReader</code> on this index.
-     * @throws IOException if the reader cannot be obtained.
-     */
-    protected synchronized CommittableIndexReader getIndexReader() throws IOException {
-        if (indexWriter != null) {
-            indexWriter.close();
-            log.debug("closing IndexWriter.");
-            indexWriter = null;
-        }
-        if (indexReader == null) {
-            IndexReader reader = IndexReader.open(getDirectory());
-            reader.setTermInfosIndexDivisor(termInfosIndexDivisor);
-            indexReader = new CommittableIndexReader(reader);
-        }
-        return indexReader;
-    }
+   /**
+    * Returns an <code>IndexReader</code> on this index. This index reader
+    * may be used to delete documents.
+    *
+    * @return an <code>IndexReader</code> on this index.
+    * @throws IOException if the reader cannot be obtained.
+    */
+   protected synchronized CommittableIndexReader getIndexReader() throws IOException
+   {
+      if (indexWriter != null)
+      {
+         indexWriter.close();
+         log.debug("closing IndexWriter.");
+         indexWriter = null;
+      }
+      
+      if (indexReader == null || !indexReader.isCurrent())
+      {
+         IndexReader reader = IndexReader.open(getDirectory());
+         reader.setTermInfosIndexDivisor(termInfosIndexDivisor);
+         indexReader = new CommittableIndexReader(reader);
+      }
+      return indexReader;
+   }
 
-    /**
-     * Returns a read-only index reader, that can be used concurrently with
-     * other threads writing to this index. The returned index reader is
-     * read-only, that is, any attempt to delete a document from the index
-     * will throw an <code>UnsupportedOperationException</code>.
-     *
-     * @param initCache if the caches in the index reader should be initialized
-     *          before the index reader is returned.
-     * @return a read-only index reader.
-     * @throws IOException if an error occurs while obtaining the index reader.
-     */
-    synchronized ReadOnlyIndexReader getReadOnlyIndexReader(boolean initCache)
-            throws IOException {
-        // get current modifiable index reader
-        CommittableIndexReader modifiableReader = getIndexReader();
-        long modCount = modifiableReader.getModificationCount();
-        if (readOnlyReader != null) {
-            if (readOnlyReader.getDeletedDocsVersion() == modCount) {
-                // reader up-to-date
-                readOnlyReader.acquire();
-                return readOnlyReader;
-            } else {
-                // reader outdated
-                if (readOnlyReader.getRefCount() == 1) {
-                    // not in use, except by this index
-                    // update the reader
-                    readOnlyReader.updateDeletedDocs(modifiableReader);
-                    readOnlyReader.acquire();
-                    return readOnlyReader;
-                } else {
-                    // cannot update reader, it is still in use
-                    // need to create a new instance
-                    readOnlyReader.release();
-                    readOnlyReader = null;
-                }
+   /**
+    * Returns a read-only index reader, that can be used concurrently with
+    * other threads writing to this index. The returned index reader is
+    * read-only, that is, any attempt to delete a document from the index
+    * will throw an <code>UnsupportedOperationException</code>.
+    *
+    * @param initCache if the caches in the index reader should be initialized
+    *          before the index reader is returned.
+    * @return a read-only index reader.
+    * @throws IOException if an error occurs while obtaining the index reader.
+    */
+   synchronized ReadOnlyIndexReader getReadOnlyIndexReader(boolean initCache) throws IOException
+   {
+      // get current modifiable index reader
+      CommittableIndexReader modifiableReader = getIndexReader();
+      long modCount = modifiableReader.getModificationCount();
+      if (readOnlyReader != null)
+      {
+         if (readOnlyReader.getDeletedDocsVersion() == modCount)
+         {
+            // reader up-to-date
+            readOnlyReader.acquire();
+            return readOnlyReader;
+         }
+         else
+         {
+            // reader outdated
+            if (readOnlyReader.getRefCount() == 1)
+            {
+               // not in use, except by this index
+               // update the reader
+               readOnlyReader.updateDeletedDocs(modifiableReader);
+               readOnlyReader.acquire();
+               return readOnlyReader;
             }
-        }
-        // if we get here there is no up-to-date read-only reader
-        // capture snapshot of deleted documents
-        BitSet deleted = new BitSet(modifiableReader.maxDoc());
-        for (int i = 0; i < modifiableReader.maxDoc(); i++) {
-            if (modifiableReader.isDeleted(i)) {
-                deleted.set(i);
+            else
+            {
+               // cannot update reader, it is still in use
+               // need to create a new instance
+               readOnlyReader.release();
+               readOnlyReader = null;
             }
-        }
-        if (sharedReader == null) {
-            // create new shared reader
-            IndexReader reader = IndexReader.open(getDirectory(), true);
-            reader.setTermInfosIndexDivisor(termInfosIndexDivisor);
-            CachingIndexReader cr = new CachingIndexReader(
-                    reader, cache, initCache);
-            sharedReader = new SharedIndexReader(cr);
-        }
-        readOnlyReader = new ReadOnlyIndexReader(sharedReader, deleted, modCount);
-        readOnlyReader.acquire();
-        return readOnlyReader;
-    }
+         }
+      }
+      // if we get here there is no up-to-date read-only reader
+      // capture snapshot of deleted documents
+      BitSet deleted = new BitSet(modifiableReader.maxDoc());
+      for (int i = 0; i < modifiableReader.maxDoc(); i++)
+      {
+         if (modifiableReader.isDeleted(i))
+         {
+            deleted.set(i);
+         }
+      }
+      if (sharedReader == null)
+      {
+         // create new shared reader
+         IndexReader reader = IndexReader.open(getDirectory(), true);
+         reader.setTermInfosIndexDivisor(termInfosIndexDivisor);
+         CachingIndexReader cr = new CachingIndexReader(reader, cache, initCache);
+         sharedReader = new SharedIndexReader(cr);
+      }
+      readOnlyReader = new ReadOnlyIndexReader(sharedReader, deleted, modCount);
+      readOnlyReader.acquire();
+      return readOnlyReader;
+   }
 
-    /**
-     * Returns a read-only index reader, that can be used concurrently with
-     * other threads writing to this index. The returned index reader is
-     * read-only, that is, any attempt to delete a document from the index
-     * will throw an <code>UnsupportedOperationException</code>.
-     *
-     * @return a read-only index reader.
-     * @throws IOException if an error occurs while obtaining the index reader.
-     */
-    protected ReadOnlyIndexReader getReadOnlyIndexReader()
-            throws IOException {
-        return getReadOnlyIndexReader(false);
-    }
+   /**
+    * Returns a read-only index reader, that can be used concurrently with
+    * other threads writing to this index. The returned index reader is
+    * read-only, that is, any attempt to delete a document from the index
+    * will throw an <code>UnsupportedOperationException</code>.
+    *
+    * @return a read-only index reader.
+    * @throws IOException if an error occurs while obtaining the index reader.
+    */
+   protected ReadOnlyIndexReader getReadOnlyIndexReader() throws IOException
+   {
+      return getReadOnlyIndexReader(false);
+   }
 
-    /**
-     * Returns an <code>IndexWriter</code> on this index.
-     * @return an <code>IndexWriter</code> on this index.
-     * @throws IOException if the writer cannot be obtained.
-     */
-    protected synchronized IndexWriter getIndexWriter() throws IOException {
-        if (indexReader != null) {
-            indexReader.close();
-            log.debug("closing IndexReader.");
-            indexReader = null;
-        }
-        if (indexWriter == null) {
-            indexWriter = new IndexWriter(getDirectory(), analyzer,
-                    new IndexWriter.MaxFieldLength(maxFieldLength));
-            indexWriter.setSimilarity(similarity);
-            indexWriter.setUseCompoundFile(useCompoundFile);
-            indexWriter.setInfoStream(STREAM_LOGGER);
-        }
-        return indexWriter;
-    }
+   /**
+    * Returns an <code>IndexWriter</code> on this index.
+    * @return an <code>IndexWriter</code> on this index.
+    * @throws IOException if the writer cannot be obtained.
+    */
+   protected synchronized IndexWriter getIndexWriter() throws IOException
+   {
+      if (indexReader != null)
+      {
+         indexReader.close();
+         log.debug("closing IndexReader.");
+         indexReader = null;
+      }
+      if (indexWriter == null)
+      {
+         indexWriter = new IndexWriter(getDirectory(), analyzer, new IndexWriter.MaxFieldLength(maxFieldLength));
+         indexWriter.setSimilarity(similarity);
+         indexWriter.setUseCompoundFile(useCompoundFile);
+         indexWriter.setInfoStream(STREAM_LOGGER);
+      }
+      return indexWriter;
+   }
 
-    /**
-     * Commits all pending changes to the underlying <code>Directory</code>.
-     * @throws IOException if an error occurs while commiting changes.
-     */
-    protected void commit() throws IOException {
-        commit(false);
-    }
+   /**
+    * Commits all pending changes to the underlying <code>Directory</code>.
+    * @throws IOException if an error occurs while committing changes.
+    */
+   protected void commit() throws IOException
+   {
+      commit(false);
+   }
 
-    /**
-     * Commits all pending changes to the underlying <code>Directory</code>.
-     *
-     * @param optimize if <code>true</code> the index is optimized after the
-     *                 commit.
-     * @throws IOException if an error occurs while commiting changes.
-     */
-    protected synchronized void commit(boolean optimize) throws IOException {
-        if (indexReader != null) {
-            log.debug("committing IndexReader.");
-            indexReader.flush();
-        }
-        if (indexWriter != null) {
-            log.debug("committing IndexWriter.");
-            indexWriter.commit();
-        }
-        // optimize if requested
-        if (optimize) {
-            IndexWriter writer = getIndexWriter();
-            writer.optimize();
-            writer.close();
-            indexWriter = null;
-        }
-    }
+   /**
+    * Commits all pending changes to the underlying <code>Directory</code>.
+    *
+    * @param optimize if <code>true</code> the index is optimized after the
+    *                 commit.
+    * @throws IOException if an error occurs while committing changes.
+    */
+   protected synchronized void commit(boolean optimize) throws IOException
+   {
+      if (indexReader != null)
+      {
+         log.debug("committing IndexReader.");
+         indexReader.flush();
+      }
+      if (indexWriter != null)
+      {
+         log.debug("committing IndexWriter.");
+         indexWriter.commit();
+      }
+      // optimize if requested
+      if (optimize)
+      {
+         IndexWriter writer = getIndexWriter();
+         writer.optimize();
+         writer.close();
+         indexWriter = null;
+      }
+   }
 
-    /**
-     * Closes this index, releasing all held resources.
-     */
-    synchronized void close() {
-        releaseWriterAndReaders();
-        if (directory != null) {
-            try {
-                directory.close();
-            } catch (IOException e) {
-                directory = null;
-            }
-        }
-    }
+   /**
+    * Closes this index, releasing all held resources.
+    */
+   synchronized void close()
+   {
+      releaseWriterAndReaders();
+      if (directory != null)
+      {
+         try
+         {
+            directory.close();
+         }
+         catch (IOException e)
+         {
+            directory = null;
+         }
+      }
+   }
 
-    /**
-     * Releases all potentially held index writer and readers.
-     */
-    protected void releaseWriterAndReaders() {
-        if (indexWriter != null) {
-            try {
-                indexWriter.close();
-            } catch (IOException e) {
-                log.warn("Exception closing index writer: " + e.toString());
-            }
-            indexWriter = null;
-        }
-        if (indexReader != null) {
-            try {
-                indexReader.close();
-            } catch (IOException e) {
-                log.warn("Exception closing index reader: " + e.toString());
-            }
-            indexReader = null;
-        }
-        if (readOnlyReader != null) {
-            try {
-                readOnlyReader.release();
-            } catch (IOException e) {
-                log.warn("Exception closing index reader: " + e.toString());
-            }
-            readOnlyReader = null;
-        }
-        if (sharedReader != null) {
-            try {
-                sharedReader.release();
-            } catch (IOException e) {
-                log.warn("Exception closing index reader: " + e.toString());
-            }
-            sharedReader = null;
-        }
-    }
-
-    /**
-     * @return the number of bytes this index occupies in memory.
-     */
-    synchronized long getRamSizeInBytes() {
-        if (indexWriter != null) {
-            return indexWriter.ramSizeInBytes();
-        } else {
-            return 0;
-        }
-    }
-
-    /**
-     * Closes the shared reader.
-     *
-     * @throws IOException if an error occurs while closing the reader.
-     */
-    protected synchronized void invalidateSharedReader() throws IOException {
-        // also close the read-only reader
-        if (readOnlyReader != null) {
+   /**
+    * Releases all potentially held index writer and readers.
+    */
+   protected void releaseWriterAndReaders()
+   {
+      if (indexWriter != null)
+      {
+         try
+         {
+            indexWriter.close();
+         }
+         catch (IOException e)
+         {
+            log.warn("Exception closing index writer: " + e.toString());
+         }
+         indexWriter = null;
+      }
+      if (indexReader != null)
+      {
+         try
+         {
+            indexReader.close();
+         }
+         catch (IOException e)
+         {
+            log.warn("Exception closing index reader: " + e.toString());
+         }
+         indexReader = null;
+      }
+      if (readOnlyReader != null)
+      {
+         try
+         {
             readOnlyReader.release();
-            readOnlyReader = null;
-        }
-        // invalidate shared reader
-        if (sharedReader != null) {
+         }
+         catch (IOException e)
+         {
+            log.warn("Exception closing index reader: " + e.toString());
+         }
+         readOnlyReader = null;
+      }
+      if (sharedReader != null)
+      {
+         try
+         {
             sharedReader.release();
-            sharedReader = null;
-        }
-    }
+         }
+         catch (IOException e)
+         {
+            log.warn("Exception closing index reader: " + e.toString());
+         }
+         sharedReader = null;
+      }
+   }
 
-    /**
-     * Returns a document that is finished with text extraction and is ready to
-     * be added to the index.
-     *
-     * @param doc the document to check.
-     * @return <code>doc</code> if it is finished already or a stripped down
-     *         copy of <code>doc</code> without text extractors.
-     * @throws IOException if the document cannot be added to the indexing
-     *                     queue.
-     */
-    private Document getFinishedDocument(Document doc) throws IOException {
-        if (!Util.isDocumentReady(doc)) {
-            Document copy = new Document();
-            // mark the document that reindexing is required
-            copy.add(new Field(FieldNames.REINDEXING_REQUIRED, "",
-                    Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
-            Iterator fields = doc.getFields().iterator();
-            while (fields.hasNext()) {
-                Fieldable f = (Fieldable) fields.next();
-                Fieldable field = null;
-                Field.TermVector tv = getTermVectorParameter(f);
-                Field.Store stored = getStoreParameter(f);
-                Field.Index indexed = getIndexParameter(f);
-                if (f instanceof LazyTextExtractorField || f.readerValue() != null) {
-                    // replace all readers with empty string reader
-                    field = new Field(f.name(), new StringReader(""), tv);
-                } else if (f.stringValue() != null) {
-                    field = new Field(f.name(), f.stringValue(),
-                            stored, indexed, tv);
-                } else if (f.isBinary()) {
-                    field = new Field(f.name(), f.binaryValue(), stored);
-                }
-                if (field != null) {
-                    field.setOmitNorms(f.getOmitNorms());
-                    copy.add(field);
-                }
+   /**
+    * @return the number of bytes this index occupies in memory.
+    */
+   synchronized long getRamSizeInBytes()
+   {
+      if (indexWriter != null)
+      {
+         return indexWriter.ramSizeInBytes();
+      }
+      else
+      {
+         return 0;
+      }
+   }
+
+   /**
+    * Closes the shared reader.
+    *
+    * @throws IOException if an error occurs while closing the reader.
+    */
+   protected synchronized void invalidateSharedReader() throws IOException
+   {
+      // also close the read-only reader
+      if (readOnlyReader != null)
+      {
+         readOnlyReader.release();
+         readOnlyReader = null;
+      }
+      // invalidate shared reader
+      if (sharedReader != null)
+      {
+         sharedReader.release();
+         sharedReader = null;
+      }
+   }
+
+   /**
+    * Returns a document that is finished with text extraction and is ready to
+    * be added to the index.
+    *
+    * @param doc the document to check.
+    * @return <code>doc</code> if it is finished already or a stripped down
+    *         copy of <code>doc</code> without text extractors.
+    * @throws IOException if the document cannot be added to the indexing
+    *                     queue.
+    */
+   private Document getFinishedDocument(Document doc) throws IOException
+   {
+      if (!Util.isDocumentReady(doc))
+      {
+         Document copy = new Document();
+         // mark the document that reindexing is required
+         copy.add(new Field(FieldNames.REINDEXING_REQUIRED, "", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
+         Iterator fields = doc.getFields().iterator();
+         while (fields.hasNext())
+         {
+            Fieldable f = (Fieldable)fields.next();
+            Fieldable field = null;
+            Field.TermVector tv = getTermVectorParameter(f);
+            Field.Store stored = getStoreParameter(f);
+            Field.Index indexed = getIndexParameter(f);
+            if (f instanceof LazyTextExtractorField || f.readerValue() != null)
+            {
+               // replace all readers with empty string reader
+               field = new Field(f.name(), new StringReader(""), tv);
             }
-            // schedule the original document for later indexing
-            Document existing = indexingQueue.addDocument(doc);
-            if (existing != null) {
-                // the queue already contained a pending document for this
-                // node. -> dispose the document
-                Util.disposeDocument(existing);
+            else if (f.stringValue() != null)
+            {
+               field = new Field(f.name(), f.stringValue(), stored, indexed, tv);
             }
-            // use the stripped down copy for now
-            doc = copy;
-        }
-        return doc;
-    }
+            else if (f.isBinary())
+            {
+               field = new Field(f.name(), f.binaryValue(), stored);
+            }
+            if (field != null)
+            {
+               field.setOmitNorms(f.getOmitNorms());
+               copy.add(field);
+            }
+         }
+         // schedule the original document for later indexing
+         Document existing = indexingQueue.addDocument(doc);
+         if (existing != null)
+         {
+            // the queue already contained a pending document for this
+            // node. -> dispose the document
+            Util.disposeDocument(existing);
+         }
+         // use the stripped down copy for now
+         doc = copy;
+      }
+      return doc;
+   }
 
-    //-------------------------< properties >-----------------------------------
+   //-------------------------< properties >-----------------------------------
 
-    /**
-     * The lucene index writer property: useCompountFile
-     */
-    void setUseCompoundFile(boolean b) {
-        useCompoundFile = b;
-        if (indexWriter != null) {
-            indexWriter.setUseCompoundFile(b);
-        }
-    }
+   /**
+    * The lucene index writer property: useCompoundFile
+    */
+   void setUseCompoundFile(boolean b)
+   {
+      useCompoundFile = b;
+      if (indexWriter != null)
+      {
+         indexWriter.setUseCompoundFile(b);
+      }
+   }
 
-    /**
-     * The lucene index writer property: maxFieldLength
-     */
-    void setMaxFieldLength(int maxFieldLength) {
-        this.maxFieldLength = maxFieldLength;
-        if (indexWriter != null) {
-            indexWriter.setMaxFieldLength(maxFieldLength);
-        }
-    }
+   /**
+    * The lucene index writer property: maxFieldLength
+    */
+   void setMaxFieldLength(int maxFieldLength)
+   {
+      this.maxFieldLength = maxFieldLength;
+      if (indexWriter != null)
+      {
+         indexWriter.setMaxFieldLength(maxFieldLength);
+      }
+   }
 
-    /**
-     * @return the current value for termInfosIndexDivisor.
-     */
-    public int getTermInfosIndexDivisor() {
-        return termInfosIndexDivisor;
-    }
+   /**
+    * @return the current value for termInfosIndexDivisor.
+    */
+   public int getTermInfosIndexDivisor()
+   {
+      return termInfosIndexDivisor;
+   }
 
-    /**
-     * Sets a new value for termInfosIndexDivisor.
-     *
-     * @param termInfosIndexDivisor the new value.
-     */
-    public void setTermInfosIndexDivisor(int termInfosIndexDivisor) {
-        this.termInfosIndexDivisor = termInfosIndexDivisor;
-    }
+   /**
+    * Sets a new value for termInfosIndexDivisor.
+    *
+    * @param termInfosIndexDivisor the new value.
+    */
+   public void setTermInfosIndexDivisor(int termInfosIndexDivisor)
+   {
+      this.termInfosIndexDivisor = termInfosIndexDivisor;
+   }
 
-    //------------------------------< internal >--------------------------------
+   //------------------------------< internal >--------------------------------
 
-    /**
-     * Returns the index parameter set on <code>f</code>.
-     *
-     * @param f a lucene field.
-     * @return the index parameter on <code>f</code>.
-     */
-    private Field.Index getIndexParameter(Fieldable f) {
-        if (!f.isIndexed()) {
-            return Field.Index.NO;
-        } else if (f.isTokenized()) {
-            return Field.Index.ANALYZED;
-        } else {
-            return Field.Index.NOT_ANALYZED;
-        }
-    }
+   /**
+    * Returns the index parameter set on <code>f</code>.
+    *
+    * @param f a lucene field.
+    * @return the index parameter on <code>f</code>.
+    */
+   private Field.Index getIndexParameter(Fieldable f)
+   {
+      if (!f.isIndexed())
+      {
+         return Field.Index.NO;
+      }
+      else if (f.isTokenized())
+      {
+         return Field.Index.ANALYZED;
+      }
+      else
+      {
+         return Field.Index.NOT_ANALYZED;
+      }
+   }
 
-    /**
-     * Returns the store parameter set on <code>f</code>.
-     *
-     * @param f a lucene field.
-     * @return the store parameter on <code>f</code>.
-     */
-    private Field.Store getStoreParameter(Fieldable f) {
-        if (f.isCompressed()) {
-            return Field.Store.COMPRESS;
-        } else if (f.isStored()) {
-            return Field.Store.YES;
-        } else {
-            return Field.Store.NO;
-        }
-    }
+   /**
+    * Returns the store parameter set on <code>f</code>.
+    *
+    * @param f a lucene field.
+    * @return the store parameter on <code>f</code>.
+    */
+   private Field.Store getStoreParameter(Fieldable f)
+   {
+      if (f.isCompressed())
+      {
+         return Field.Store.COMPRESS;
+      }
+      else if (f.isStored())
+      {
+         return Field.Store.YES;
+      }
+      else
+      {
+         return Field.Store.NO;
+      }
+   }
 
-    /**
-     * Returns the term vector parameter set on <code>f</code>.
-     *
-     * @param f a lucene field.
-     * @return the term vector parameter on <code>f</code>.
-     */
-    private Field.TermVector getTermVectorParameter(Fieldable f) {
-        if (f.isStorePositionWithTermVector() && f.isStoreOffsetWithTermVector()) {
-            return Field.TermVector.WITH_POSITIONS_OFFSETS;
-        } else if (f.isStorePositionWithTermVector()) {
-            return Field.TermVector.WITH_POSITIONS;
-        } else if (f.isStoreOffsetWithTermVector()) {
-            return Field.TermVector.WITH_OFFSETS;
-        } else if (f.isTermVectorStored()) {
-            return Field.TermVector.YES;
-        } else {
-            return Field.TermVector.NO;
-        }
-    }
+   /**
+    * Returns the term vector parameter set on <code>f</code>.
+    *
+    * @param f a lucene field.
+    * @return the term vector parameter on <code>f</code>.
+    */
+   private Field.TermVector getTermVectorParameter(Fieldable f)
+   {
+      if (f.isStorePositionWithTermVector() && f.isStoreOffsetWithTermVector())
+      {
+         return Field.TermVector.WITH_POSITIONS_OFFSETS;
+      }
+      else if (f.isStorePositionWithTermVector())
+      {
+         return Field.TermVector.WITH_POSITIONS;
+      }
+      else if (f.isStoreOffsetWithTermVector())
+      {
+         return Field.TermVector.WITH_OFFSETS;
+      }
+      else if (f.isTermVectorStored())
+      {
+         return Field.TermVector.YES;
+      }
+      else
+      {
+         return Field.TermVector.NO;
+      }
+   }
 
-    /**
-     * Adapter to pipe info messages from lucene into log messages.
-     */
-    private static final class LoggingPrintStream extends PrintStream {
+   /**
+    * Adapter to pipe info messages from lucene into log messages.
+    */
+   private static final class LoggingPrintStream extends PrintStream
+   {
 
-        /** Buffer print calls until a newline is written */
-        private StringBuffer buffer = new StringBuffer();
+      /** Buffer print calls until a newline is written */
+      private StringBuffer buffer = new StringBuffer();
 
-        public LoggingPrintStream() {
-            super(new OutputStream() {
-                public void write(int b) {
-                    // do nothing
-                }
-            });
-        }
+      public LoggingPrintStream()
+      {
+         super(new OutputStream()
+         {
+            public void write(int b)
+            {
+               // do nothing
+            }
+         });
+      }
 
-        public void print(String s) {
-            buffer.append(s);
-        }
+      public void print(String s)
+      {
+         buffer.append(s);
+      }
 
-        public void println(String s) {
-            buffer.append(s);
-            log.debug(buffer.toString());
-            buffer.setLength(0);
-        }
-    }
+      public void println(String s)
+      {
+         buffer.append(s);
+         log.debug(buffer.toString());
+         buffer.setLength(0);
+      }
+   }
 }

Modified: jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java
===================================================================
--- jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java	2009-12-15 13:25:52 UTC (rev 1057)
+++ jcr/branches/1.12.0-JBC/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java	2009-12-15 13:35:49 UTC (rev 1058)
@@ -30,6 +30,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -834,9 +835,19 @@
             for (int i = 0; i < indexes.size(); i++)
             {
                PersistentIndex pIdx = (PersistentIndex)indexes.get(i);
+               
                if (indexNames.contains(pIdx.getName()))
                {
-                  readerList.add(pIdx.getReadOnlyIndexReader(initCache));
+                  try
+                  {
+                     readerList.add(pIdx.getReadOnlyIndexReader(initCache));
+                  }
+                  catch (FileNotFoundException e)
+                  {
+                     if (directoryManager.hasDirectory(pIdx.getName()))
+                     {
+                        throw e;
+                     }
+                  }
                }
             }
             readerList.add(volatileIndex.getReadOnlyIndexReader());
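
The new catch block makes a missing index file non-fatal only in the removed-directory case: if directoryManager.hasDirectory(pIdx.getName()) still reports the directory, the FileNotFoundException points at a genuinely broken persistent index and is rethrown; if the whole directory was removed on the other side (the scenario named in the log message), that persistent index is simply skipped and the remaining readers, plus the volatile index, are combined as before.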


