Author: epbernard
Date: 2007-01-06 20:05:14 -0500 (Sat, 06 Jan 2007)
New Revision: 11018
Added:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
Removed:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
Log:
ANN-522 avoid deadlocks with multiple directoryproviders
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -13,6 +13,7 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.hibernate.HibernateException;
+import org.hibernate.AssertionFailure;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.store.DirectoryProvider;
@@ -50,6 +51,9 @@
public IndexReader getIndexReader(Class entity) {
//TODO NPEs
DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ //one cannot access a reader for update after a writer has been accessed
+ if ( writers.containsKey( provider ) )
+ throw new AssertionFailure("Tries to read for update a index while a writer is
accessed" + entity);
IndexReader reader = readers.get( provider );
if ( reader != null ) return reader;
lockProvider( provider );
@@ -65,6 +69,17 @@
public IndexWriter getIndexWriter(Class entity) {
DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ //one has to close a reader for update before a writer is accessed
+ IndexReader reader = readers.get( provider );
+ if ( reader != null ) {
+ try {
+ reader.close();
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Exception while closing IndexReader", e );
+ }
+ readers.remove( provider );
+ }
IndexWriter writer = writers.get( provider );
if ( writer != null ) return writer;
lockProvider( provider );
Deleted:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -1,95 +0,0 @@
-//$Id: $
-package org.hibernate.search.backend.impl;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.backend.impl.LuceneWorker;
-import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.Workspace;
-import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.UpdateWork;
-import org.hibernate.search.backend.DeleteWork;
-import org.hibernate.search.backend.AddWork;
-
-/**
- * Batch work until #performWork is called.
- * The work is then executed synchronously or asynchronously
- *
- * @author Emmanuel Bernard
- */
-public class BatchLuceneWorkQueue implements WorkQueue {
- private Workspace workspace;
- private LuceneWorker worker;
- private List<Work> queue = new ArrayList<Work>();
- private boolean sync;
-
- public BatchLuceneWorkQueue(Map<Class, DocumentBuilder<Object>>
documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders, boolean
sync) {
- workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
- worker = new LuceneWorker( workspace );
- this.sync = sync;
- }
-
- public void add(Work work) {
- //TODO optimize by getting rid of dupe works
- if ( work instanceof UpdateWork ) {
- //split in 2 to optimize the process (reader first, writer next
- queue.add( new DeleteWork( work.getId(), work.getEntity() ) );
- queue.add( new AddWork( work.getId(), work.getEntity(), work.getDocument() ) );
- }
- else {
- queue.add( work );
- }
- }
-
- //TODO implements parallel batchWorkers (one per Directory)
- public void performWork() {
- BatchWorker batchWorker = new BatchWorker( queue, workspace, worker );
- if (sync) {
- batchWorker.run();
- }
- else {
- //TODO pool threads?
- Thread thread = new Thread(batchWorker);
- thread.start();
- }
- }
-
- public void cancelWork() {
- queue.clear();
- }
-
- private class BatchWorker implements Runnable {
- private List<Work> queue;
- private Workspace workspace;
- private LuceneWorker worker;
-
- public BatchWorker(List<Work> queue, Workspace workspace, LuceneWorker worker) {
- this.queue = queue;
- this.workspace = workspace;
- this.worker = worker;
- }
-
- public void run() {
- try {
- //use of index reader
- for ( Work work : queue ) {
- if ( work instanceof DeleteWork ) worker.performWork( work );
- }
- workspace.clean(); //close readers
- for ( Work work : queue ) {
- if ( work instanceof AddWork ) worker.performWork( work );
- }
- }
- finally {
- workspace.clean();
- queue.clear();
- }
- }
- }
-}
Copied:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
(from rev 11017,
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java)
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchLuceneWorkQueue.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -0,0 +1,124 @@
+//$Id: $
+package org.hibernate.search.backend.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.backend.impl.LuceneWorker;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.AddWork;
+
+/**
+ * Batch work until #performWork is called.
+ * The work is then executed synchronously or asynchronously
+ *
+ * @author Emmanuel Bernard
+ */
+public class BatchedWorkQueue implements WorkQueue {
+ private List<Work> queue = new ArrayList<Work>();
+ private boolean sync;
+ Map<Class, DocumentBuilder<Object>> documentBuilders;
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
+
+ public BatchedWorkQueue(Map<Class, DocumentBuilder<Object>>
documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders, boolean
sync) {
+ this.documentBuilders = documentBuilders;
+ this.lockableDirectoryProviders = lockableDirectoryProviders;
+ this.sync = sync;
+ }
+
+ public void add(Work work) {
+ //TODO optimize by getting rid of dupe works
+ if ( work instanceof UpdateWork ) {
+ //split in 2 to optimize the process (reader first, writer next)
+ queue.add( new DeleteWork( work.getId(), work.getEntity() ) );
+ queue.add( new AddWork( work.getId(), work.getEntity(), work.getDocument() ) );
+ }
+ else {
+ queue.add( work );
+ }
+ }
+
+ //TODO implements parallel batchWorkers (one per Directory)
+ public void performWork() {
+ BatchWorker batchWorker = new BatchWorker( queue, documentBuilders,
lockableDirectoryProviders );
+ if (sync) {
+ batchWorker.run();
+ }
+ else {
+ //TODO pool threads?
+ Thread thread = new Thread(batchWorker);
+ thread.start();
+ }
+ }
+
+ public void cancelWork() {
+ queue.clear();
+ }
+
+ private class BatchWorker implements Runnable {
+ private List<Work> queue;
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
+ private Map<Class, DocumentBuilder<Object>> documentBuilders;
+
+
+ public BatchWorker(List<Work> queue, Map<Class,
DocumentBuilder<Object>> documentBuilders,
+ Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ this.queue = queue;
+ this.documentBuilders = documentBuilders;
+ this.lockableDirectoryProviders = lockableDirectoryProviders;
+ }
+
+ public void run() {
+ Workspace workspace;
+ LuceneWorker worker;
+ workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
+ worker = new LuceneWorker( workspace );
+ try {
+ deadlockFreeQueue(queue, workspace);
+ for ( Work work : queue ) {
+ worker.performWork( work );
+ }
+ }
+ finally {
+ workspace.clean();
+ queue.clear();
+ }
+ }
+
+ /**
+ * One must lock the directory providers in the exact same order to avoid
+ * deadlock between concurrent threads or processes.
+ * To achieve that, the work is done per directory provider.
+ */
+ private void deadlockFreeQueue(List<Work> queue, final Workspace workspace) {
+ Collections.sort( queue, new Comparator<Work>() {
+ public int compare(Work o1, Work o2) {
+ long h1 = getWorkHashCode( o1, workspace );
+ long h2 = getWorkHashCode( o2, workspace );
+ return h1 < h2 ?
+ -1 :
+ h1 == h2 ?
+ 0 :
+ 1;
+ }
+ } );
+ }
+
+ private long getWorkHashCode(Work work, Workspace workspace) {
+ long h = workspace.getDocumentBuilder( work.getEntity() ).hashCode() * 2;
+ if (work instanceof AddWork) h+=1; //addwork after deleteWork
+ return h;
+ }
+ }
+}
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -38,7 +38,7 @@
PostTransactionWorkQueueSynchronization txSync =
(PostTransactionWorkQueueSynchronization)
queuePerTransaction.get( transaction );
if ( txSync == null || txSync.isConsumed() ) {
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders,
lockableDirectoryProviders, sync );
+ WorkQueue workQueue = new BatchedWorkQueue( documentBuilders,
lockableDirectoryProviders, sync );
txSync = new PostTransactionWorkQueueSynchronization( workQueue, queuePerTransaction
);
transaction.registerSynchronization( txSync );
queuePerTransaction.put(transaction, txSync);
@@ -46,7 +46,7 @@
txSync.add( work );
}
else {
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders,
lockableDirectoryProviders, sync );
+ WorkQueue workQueue = new BatchedWorkQueue( documentBuilders,
lockableDirectoryProviders, sync );
PostTransactionWorkQueueSynchronization sync = new
PostTransactionWorkQueueSynchronization( workQueue );
sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -7,7 +7,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
-import javax.transaction.Status;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -15,7 +14,6 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.hibernate.HibernateException;
-import org.hibernate.Transaction;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.AbstractEvent;
@@ -27,17 +25,13 @@
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.Environment;
-import org.hibernate.search.util.WeakIdentityHashMap;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.DeleteWork;
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.Worker;
import org.hibernate.search.backend.WorkerFactory;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
-import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.DirectoryProviderFactory;
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -33,7 +33,7 @@
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.BatchedWorkQueue;
import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.FullTextSession;
@@ -113,7 +113,7 @@
Map<Class, DocumentBuilder<Object>> documentBuilders,
Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
//FIXME should be harmonized with the WorkerFactory?
- WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders,
lockableDirectoryProviders, true );
+ WorkQueue workQueue = new BatchedWorkQueue( documentBuilders,
lockableDirectoryProviders, true );
return new PostTransactionWorkQueueSynchronization( workQueue );
}
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/DirectoryProvider.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -9,9 +9,10 @@
/**
* Set up and provide a Lucene <code>Directory</code>
* <code>equals()</code> and <code>hashCode()</code> must
guarantee equality
- * between two providers pointing to the same underlying Lucene Store
- * This class must be thread safe regarding <code>getDirectory()</code>
- * calls
+ * between two providers pointing to the same underlying Lucene Store.
+ * Besides that, hashCode ordering is used to avoid deadlock when locking a directory
provider.
+ *
+ * This class must be thread safe regarding <code>getDirectory()</code>
calls
*
* @author Emmanuel Bernard
* @author Sylvain Vieujot
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/FSDirectoryProvider.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -75,6 +75,7 @@
// this code is actually broken since the value change after initialize call
// but from a practical POV this is fine since we only call this method
// after initialize call
- return indexName.hashCode();
+ int hash = 11;
+ return 37 * hash + indexName.hashCode();
}
}
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
===================================================================
---
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2007-01-06
17:12:57 UTC (rev 11017)
+++
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2007-01-07
01:05:14 UTC (rev 11018)
@@ -51,7 +51,8 @@
// this code is actually broken since the value change after initialize call
// but from a practical POV this is fine since we only call this method
// after initialize call
- return indexName.hashCode();
+ int hash = 7;
+ return 29 * hash + indexName.hashCode();
}
}