[hibernate-commits] Hibernate SVN: r10671 - in branches/Lucene_Integration/HibernateExt/metadata/src: java/org/hibernate/lucene/event java/org/hibernate/lucene/query test/org/hibernate/lucene/test

hibernate-commits at lists.jboss.org hibernate-commits at lists.jboss.org
Tue Oct 31 23:27:28 EST 2006


Author: epbernard
Date: 2006-10-31 23:27:26 -0500 (Tue, 31 Oct 2006)
New Revision: 10671

Added:
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/AddWork.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/DeleteWork.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/UpdateWork.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Work.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkInSynchronization.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkQueue.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Worker.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Workspace.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TransactionTest.java
Modified:
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/MappingTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TestCase.java
Log:
ANN-482 use post-* event rather than post-commit-*, use transaction synchronization
Introduce a workspace, a work queue, and a list of work.
Work, WorkQueue are expected to be reusable to plug a different approach (like asynchronous)

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/AddWork.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/AddWork.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/AddWork.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,15 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class AddWork extends Work {
+	public AddWork(Serializable id, Class entity, Document document) {
+		super( id, entity, document );
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/DeleteWork.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/DeleteWork.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/DeleteWork.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,13 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.Serializable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DeleteWork extends Work {
+	public DeleteWork(Serializable id, Class entity) {
+		super( id, entity );
+	}
+}

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -9,6 +9,7 @@
 import java.util.Set;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import javax.transaction.Status;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -19,11 +20,9 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
 import org.hibernate.HibernateException;
-import org.hibernate.reflection.ReflectionManager;
-import org.hibernate.reflection.XClass;
-import org.hibernate.reflection.java.JavaXFactory;
+import org.hibernate.cfg.AnnotationConfiguration;
 import org.hibernate.cfg.Configuration;
-import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.event.AbstractEvent;
 import org.hibernate.event.Initializable;
 import org.hibernate.event.PostDeleteEvent;
 import org.hibernate.event.PostDeleteEventListener;
@@ -37,6 +36,8 @@
 import org.hibernate.lucene.store.DirectoryProvider;
 import org.hibernate.lucene.store.DirectoryProviderFactory;
 import org.hibernate.mapping.PersistentClass;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
 import org.hibernate.util.ReflectHelper;
 
 /**
@@ -48,151 +49,173 @@
  * @author Mattias Arbin
  */
 //TODO work on sharing the same indexWriters and readers across a single post operation...
+//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
 public class LuceneEventListener implements PostDeleteEventListener, PostInsertEventListener,
-        PostUpdateEventListener, Initializable {
-    ReflectionManager reflectionManager;
+		PostUpdateEventListener, Initializable {
+	ReflectionManager reflectionManager;
 
-    //FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
-    @Deprecated
-    public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
-        return documentBuilders;
-    }
+	//FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
+	@Deprecated
+	public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+		return documentBuilders;
+	}
 
 
-    private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
-    //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
-    private Map<DirectoryProvider, Lock> indexLock = new HashMap<DirectoryProvider, Lock>();
-    private boolean initialized;
+	private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+	//keep track of the index modifiers per DirectoryProvider since multiple entities can use the same directory provider
+	private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+			new HashMap<DirectoryProvider, ReentrantLock>();
+	private boolean initialized;
 
-    private static final Log log = LogFactory.getLog(LuceneEventListener.class);
+	private static final Log log = LogFactory.getLog( LuceneEventListener.class );
 
-    public void initialize(Configuration cfg) {
-        if (initialized) return;
-        //yuk
-        reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+	public void initialize(Configuration cfg) {
+		if ( initialized ) return;
+		//yuk
+		reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
 
-        Class analyzerClass;
-        String analyzerClassName = cfg.getProperty(Environment.ANALYZER_CLASS);
-        if (analyzerClassName != null) {
-            try {
-                analyzerClass = ReflectHelper.classForName(analyzerClassName);
-            }
-            catch (Exception e) {
-                throw new HibernateException(
-                        "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
-                        e
-                );
-            }
-        } else {
-            analyzerClass = StandardAnalyzer.class;
-        }
-        // Initialize analyzer
-        Analyzer analyzer;
-        try {
-            analyzer = (Analyzer) analyzerClass.newInstance();
-        }
-        catch (ClassCastException e) {
-            throw new HibernateException(
-                    "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
-            );
-        }
-        catch (Exception e) {
-            throw new HibernateException("Failed to instantiate lucene analyzer with type " + analyzerClassName);
-        }
+		Class analyzerClass;
+		String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+		if ( analyzerClassName != null ) {
+			try {
+				analyzerClass = ReflectHelper.classForName( analyzerClassName );
+			}
+			catch (Exception e) {
+				throw new HibernateException(
+						"Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
+						e
+				);
+			}
+		}
+		else {
+			analyzerClass = StandardAnalyzer.class;
+		}
+		// Initialize analyzer
+		Analyzer analyzer;
+		try {
+			analyzer = (Analyzer) analyzerClass.newInstance();
+		}
+		catch (ClassCastException e) {
+			throw new HibernateException(
+					"Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
+			);
+		}
+		catch (Exception e) {
+			throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
+		}
 
-        Iterator iter = cfg.getClassMappings();
-        DirectoryProviderFactory factory = new DirectoryProviderFactory();
-        while (iter.hasNext()) {
-            PersistentClass clazz = (PersistentClass) iter.next();
-            Class<?> mappedClass = clazz.getMappedClass();
-            if (mappedClass != null) {
-                XClass mappedXClass = reflectionManager.toXClass( mappedClass );
-                if ( mappedXClass != null && mappedXClass.isAnnotationPresent(Indexed.class) ) {
-                    DirectoryProvider provider = factory.createDirectoryProvider(mappedXClass, cfg);
-                    if (!indexLock.containsKey(provider)) {
-                        indexLock.put(provider, new ReentrantLock());
-                    }
-                    final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
-                            mappedXClass, analyzer, provider, reflectionManager
-                    );
+		Iterator iter = cfg.getClassMappings();
+		DirectoryProviderFactory factory = new DirectoryProviderFactory();
+		while ( iter.hasNext() ) {
+			PersistentClass clazz = (PersistentClass) iter.next();
+			Class<?> mappedClass = clazz.getMappedClass();
+			if ( mappedClass != null ) {
+				XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+				if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+					DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+					if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+						lockableDirectoryProviders.put( provider, new ReentrantLock() );
+					}
+					final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+							mappedXClass, analyzer, provider, reflectionManager
+					);
 
-                    documentBuilders.put(mappedClass, documentBuilder);
-                }
-            }
-        }
-        Set<Class> indexedClasses = documentBuilders.keySet();
-        for (DocumentBuilder builder : documentBuilders.values()) {
-            builder.postInitialize(indexedClasses);
-        }
-        initialized = true;
-    }
+					documentBuilders.put( mappedClass, documentBuilder );
+				}
+			}
+		}
+		Set<Class> indexedClasses = documentBuilders.keySet();
+		for ( DocumentBuilder builder : documentBuilders.values() ) {
+			builder.postInitialize( indexedClasses );
+		}
+		initialized = true;
+	}
 
-    public void onPostDelete(PostDeleteEvent event) {
-        DocumentBuilder builder = documentBuilders.get(event.getEntity().getClass());
-        if (builder != null) {
-            remove(builder, event.getId());
-        }
-    }
+	public void onPostDelete(PostDeleteEvent event) {
+		if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+			DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
+			processWork( work, event );
+		}
+	}
 
-    public void onPostInsert(PostInsertEvent event) {
-        final Object entity = event.getEntity();
-        DocumentBuilder<Object> builder = documentBuilders.get(entity.getClass());
-        if (builder != null) {
-            add(entity, builder, event.getId());
-        }
-    }
+	public void onPostInsert(PostInsertEvent event) {
+		final Object entity = event.getEntity();
+		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+		if ( builder != null ) {
+			Serializable id = event.getId();
+			Document doc = builder.getDocument( entity, id );
+			AddWork work = new AddWork( id, entity.getClass(), doc );
+			processWork( work, event );
+		}
+	}
 
-    public void onPostUpdate(PostUpdateEvent event) {
-        final Object entity = event.getEntity();
-        DocumentBuilder<Object> builder = documentBuilders.get(entity.getClass());
-        if (builder != null) {
-            final Serializable id = event.getId();
-            remove(builder, id);
-            add(entity, builder, id);
-        }
-    }
+	public void onPostUpdate(PostUpdateEvent event) {
+		final Object entity = event.getEntity();
+		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+		if ( builder != null ) {
+			Serializable id = event.getId();
+			Document doc = builder.getDocument( entity, id );
+			UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+			processWork( work, event );
+		}
+	}
 
-    private void remove(DocumentBuilder<?> builder, Serializable id) {
-        Term term = builder.getTerm(id);
-        log.debug("removing: " + term);
-        DirectoryProvider directoryProvider = builder.getDirectoryProvider();
-        Lock lock = indexLock.get(directoryProvider);
-        lock.lock();
-        try {
+	private void processWork(Work work, AbstractEvent event) {
+		WorkQueue workQueue = new WorkQueue( documentBuilders, lockableDirectoryProviders );
+		workQueue.add( work );
+		WorkInSynchronization sync = new WorkInSynchronization( workQueue );
+		if ( event.getSession().isTransactionInProgress() ) {
+			event.getSession().getTransaction().registerSynchronization( sync );
+		}
+		else {
+			sync.afterCompletion( Status.STATUS_COMMITTED );
+		}
+	}
 
-            IndexReader reader = IndexReader.open(directoryProvider.getDirectory());
-            reader.deleteDocuments(term);
-            reader.close();
-        }
-        catch (IOException ioe) {
-            throw new HibernateException(ioe);
-        }
-        finally {
-            lock.unlock();
-        }
-    }
+	@Deprecated
+	private void remove(DocumentBuilder<?> builder, Serializable id) {
+		//FIXME lookup by term id and filter by test _hibernate_class
+		Term term = builder.getTerm( id );
+		log.debug( "removing: " + term );
+		DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+		Lock lock = lockableDirectoryProviders.get( directoryProvider );
+		lock.lock();
+		try {
 
-    private void add(final Object entity, final DocumentBuilder<Object> builder, final Serializable id) {
-        Document doc = builder.getDocument(entity, id);
-        if (log.isDebugEnabled()) {
-            log.debug("adding: " + doc);
-        }
-        DirectoryProvider directoryProvider = builder.getDirectoryProvider();
-        Lock lock = indexLock.get(directoryProvider);
-        lock.lock();
-        try {
-            IndexWriter writer = new IndexWriter(
-                    directoryProvider.getDirectory(), builder.getAnalyzer(), false
-            ); //have been created at init time
-            writer.addDocument(doc);
-            writer.close();
-        }
-        catch (IOException ioe) {
-            throw new HibernateException(ioe);
-        }
-        finally {
-            lock.unlock();
-        }
-    }
+			IndexReader reader = IndexReader.open( directoryProvider.getDirectory() );
+			reader.deleteDocuments( term );
+			reader.close();
+		}
+		catch (IOException ioe) {
+			throw new HibernateException( ioe );
+		}
+		finally {
+			lock.unlock();
+		}
+	}
 
+	@Deprecated
+	private void add(final Object entity, final DocumentBuilder<Object> builder, final Serializable id) {
+		Document doc = builder.getDocument( entity, id );
+		if ( log.isDebugEnabled() ) {
+			log.debug( "adding: " + doc );
+		}
+		DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+		Lock lock = lockableDirectoryProviders.get( directoryProvider );
+		lock.lock();
+		try {
+			IndexWriter writer = new IndexWriter(
+					directoryProvider.getDirectory(), builder.getAnalyzer(), false
+			); //have been created at init time
+			writer.addDocument( doc );
+			writer.close();
+		}
+		catch (IOException ioe) {
+			throw new HibernateException( ioe );
+		}
+		finally {
+			lock.unlock();
+		}
+	}
+
 }

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/UpdateWork.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/UpdateWork.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/UpdateWork.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,15 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class UpdateWork extends Work {
+	public UpdateWork(Serializable id, Class entity, Document document) {
+		super( id, entity, document );
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Work.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Work.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Work.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,40 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * Represents a Lucene unit of work
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class Work implements Serializable {
+	private Document document;
+	private Class entity;
+	private Serializable id;
+
+	public Work(Serializable id, Class entity) {
+		this( id, entity, null );
+	}
+
+	public Work(Serializable id, Class entity, Document document) {
+		this.id = id;
+		this.entity = entity;
+		this.document = document;
+	}
+
+
+	public Document getDocument() {
+		return document;
+	}
+
+	public Class getEntity() {
+		return entity;
+	}
+
+	public Serializable getId() {
+		return id;
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkInSynchronization.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkInSynchronization.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkInSynchronization.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,34 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import javax.transaction.Status;
+import javax.transaction.Synchronization;
+
+/**
+ * Execute some work inside a transaction synchronization
+ *
+ * @author Emmanuel Bernard
+ */
+public class WorkInSynchronization implements Synchronization {
+	private WorkQueue workQueue;
+
+	public WorkInSynchronization(WorkQueue workQueue) {
+		this.workQueue = workQueue;
+	}
+
+	public void beforeCompletion() {
+	}
+
+	public void afterCompletion(int i) {
+		if ( Status.STATUS_COMMITTED == i ) {
+			workQueue.performWork();
+		}
+		else {
+			workQueue.cancelWork();
+		}
+	}
+
+	public static interface Work {
+		void perform();
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkQueue.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkQueue.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/WorkQueue.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,58 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.hibernate.lucene.DocumentBuilder;
+import org.hibernate.lucene.store.DirectoryProvider;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class WorkQueue {
+	private Workspace workspace;
+	private Worker worker;
+	private List<Work> queue = new ArrayList<Work>();
+
+	public WorkQueue(Map<Class, DocumentBuilder<Object>> documentBuilders,
+					 Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+		workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
+		worker = new Worker( workspace );
+	}
+
+	public void add(Work work) {
+		//TODO optimize by getting rid of dupe works
+		if ( work instanceof UpdateWork ) {
+		//split in 2 to optimize the process (reader first, writer next)
+			queue.add( new DeleteWork( work.getId(), work.getEntity() ) );
+			queue.add( new AddWork( work.getId(), work.getEntity(), work.getDocument() ) );
+		}
+		else {
+			queue.add( work );
+		}
+	}
+
+	public void performWork() {
+		try {
+			//use of index reader
+			for ( Work work : queue ) {
+				if ( work instanceof DeleteWork ) worker.performWork( work );
+			}
+			workspace.clean(); //close readers
+			for ( Work work : queue ) {
+				if ( work instanceof AddWork ) worker.performWork( work );
+			}
+		}
+		finally {
+			workspace.clean();
+			queue.clear();
+		}
+	}
+
+	public void cancelWork() {
+		queue.clear();
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Worker.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Worker.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Worker.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,91 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.hibernate.AssertionFailure;
+import org.hibernate.HibernateException;
+import org.hibernate.lucene.DocumentBuilder;
+
+/**
+ * Stateless implementation that performs a unit of work
+ *
+ * @author Emmanuel Bernard
+ */
+public class Worker {
+	private Workspace workspace;
+	private static Log log = LogFactory.getLog( Worker.class );
+
+	public Worker(Workspace workspace) {
+		this.workspace = workspace;
+	}
+
+	public void performWork(Work work) {
+		if ( AddWork.class.isAssignableFrom( work.getClass() ) ) {
+			performWork( (AddWork) work );
+		}
+		else if ( UpdateWork.class.isAssignableFrom( work.getClass() ) ) {
+			performWork( (UpdateWork) work );
+		}
+		else if ( DeleteWork.class.isAssignableFrom( work.getClass() ) ) {
+			performWork( (DeleteWork) work );
+		}
+		else {
+			throw new AssertionFailure( "Unknown work type: " + work.getClass() );
+		}
+	}
+
+	public void performWork(AddWork work) {
+		Class entity = work.getEntity();
+		Serializable id = work.getId();
+		Document document = work.getDocument();
+		add( entity, id, document );
+	}
+
+	private void add(Class entity, Serializable id, Document document) {
+		if ( log.isTraceEnabled() )
+			log.trace( "add to Lucene index: " + entity + "#" + id + ": " + document );
+		IndexWriter writer = workspace.getIndexWriter( entity );
+		try {
+			writer.addDocument( document );
+		}
+		catch (IOException e) {
+			throw new HibernateException( "Unable to add to Lucene index: " + entity + "#" + id, e );
+		}
+	}
+
+	public void performWork(UpdateWork work) {
+		Class entity = work.getEntity();
+		Serializable id = work.getId();
+		Document document = work.getDocument();
+		remove( entity, id );
+		add( entity, id, document );
+	}
+
+	public void performWork(DeleteWork work) {
+		Class entity = work.getEntity();
+		Serializable id = work.getId();
+		remove( entity, id );
+	}
+
+	private void remove(Class entity, Serializable id) {
+		//FIXME lookup by term id and filter by test _hibernate_class
+		log.trace( "remove from Lucene index: " + entity + "#" + id );
+		DocumentBuilder builder = workspace.getDocumentBuilder( entity );
+		Term term = builder.getTerm( id );
+		IndexReader reader = workspace.getIndexReader( entity );
+		try {
+			reader.deleteDocuments( term );
+		}
+		catch (Exception e) {
+			throw new HibernateException( "Unable to remove from Lucene index: " + entity + "#" + id, e );
+		}
+	}
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Workspace.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Workspace.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/Workspace.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,137 @@
+//$Id: $
+package org.hibernate.lucene.event;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.hibernate.HibernateException;
+import org.hibernate.lucene.DocumentBuilder;
+import org.hibernate.lucene.store.DirectoryProvider;
+
+/**
+ * Lucene workspace
+ * This is not intended to be used in a multithreaded environment
+ * <p/>
+ * One cannot execute modifications through an IndexReader when an IndexWriter has been acquired on the same underlying directory
+ * One cannot get an IndexWriter when an IndexReader has been acquired and modified on the same underlying directory
+ * The recommended approach is to execute all the modifications on the IndexReaders, {@link #clean()}, and acquire the
+ * index writers
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene use abstract classes, not interfaces
+public class Workspace {
+	private static Log log = LogFactory.getLog( Workspace.class );
+	private Map<Class, DocumentBuilder<Object>> documentBuilders;
+	private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
+	private Map<DirectoryProvider, IndexReader> readers = new HashMap<DirectoryProvider, IndexReader>();
+	private Map<DirectoryProvider, IndexWriter> writers = new HashMap<DirectoryProvider, IndexWriter>();
+	private List<DirectoryProvider> lockedProviders = new ArrayList<DirectoryProvider>();
+
+	public Workspace(Map<Class, DocumentBuilder<Object>> documentBuilders,
+					 Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+		this.documentBuilders = documentBuilders;
+		this.lockableDirectoryProviders = lockableDirectoryProviders;
+	}
+
+
+	public DocumentBuilder getDocumentBuilder(Class entity) {
+		return documentBuilders.get( entity );
+	}
+
+	public IndexReader getIndexReader(Class entity) {
+		//TODO NPEs
+		DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+		IndexReader reader = readers.get( provider );
+		if ( reader != null ) return reader;
+		lockProvider( provider );
+		try {
+			reader = IndexReader.open( provider.getDirectory() );
+			readers.put( provider, reader );
+		}
+		catch (IOException e) {
+			cleanUp( new HibernateException( "Unable to open IndexReader for " + entity, e ) );
+		}
+		return reader;
+	}
+
+	public IndexWriter getIndexWriter(Class entity) {
+		DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+		IndexWriter writer = writers.get( provider );
+		if ( writer != null ) return writer;
+		lockProvider( provider );
+		try {
+			writer = new IndexWriter(
+					provider.getDirectory(), documentBuilders.get( entity ).getAnalyzer(), false
+			); //have been created at init time
+			writers.put( provider, writer );
+		}
+		catch (IOException e) {
+			cleanUp( new HibernateException( "Unable to open IndexWriter for " + entity, e ) );
+		}
+		return writer;
+	}
+
+	private void lockProvider(DirectoryProvider provider) {
+		//make sure to use a semaphore
+		ReentrantLock lock = lockableDirectoryProviders.get( provider );
+		//of course a given thread cannot have a race cond with itself
+		if ( !lock.isHeldByCurrentThread() ) {
+			lock.lock();
+			lockedProviders.add( provider );
+		}
+	}
+
+	private void cleanUp(HibernateException originalException) {
+		//release all readers and writers, then release locks
+		HibernateException raisedException = originalException;
+		for ( IndexReader reader : readers.values() ) {
+			try {
+				reader.close();
+			}
+			catch (IOException e) {
+				if ( raisedException != null ) {
+					log.error( "Subsequent Exception while closing IndexReader", e );
+				}
+				else {
+					raisedException = new HibernateException( "Exception while closing IndexReader", e );
+				}
+			}
+		}
+		for ( IndexWriter writer : writers.values() ) {
+			try {
+				writer.close();
+			}
+			catch (IOException e) {
+				if ( raisedException != null ) {
+					log.error( "Subsequent Exception while closing IndexWriter", e );
+				}
+				else {
+					raisedException = new HibernateException( "Exception while closing IndexWriter", e );
+				}
+			}
+		}
+		for ( DirectoryProvider provider : lockedProviders ) {
+			lockableDirectoryProviders.get( provider ).unlock();
+		}
+		readers.clear();
+		writers.clear();
+		lockedProviders.clear();
+		if ( raisedException != null ) throw raisedException;
+	}
+
+	/**
+	 * release resources consumed in the workspace if any
+	 */
+	public void clean() {
+		cleanUp( null );
+	}
+}

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -247,7 +247,7 @@
     }
 
     private LuceneEventListener getLuceneEventListener() {
-        PostInsertEventListener[] listeners = session.getListeners().getPostCommitInsertEventListeners();
+        PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
         LuceneEventListener listener = null;
        //FIXME this sucks since we mandate the event listener use
         for (PostInsertEventListener candidate : listeners) {

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/MappingTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/MappingTest.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/MappingTest.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -182,9 +182,9 @@
 		cfg.setProperty( "hibernate.lucene.Clock.directory_provider", FSDirectoryProvider.class.getName() );
 		cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
 		LuceneEventListener del = new LuceneEventListener();
-		cfg.getEventListeners().setPostCommitDeleteEventListeners( new PostDeleteEventListener[]{del} );
-		cfg.getEventListeners().setPostCommitUpdateEventListeners( new PostUpdateEventListener[]{del} );
-		cfg.getEventListeners().setPostCommitInsertEventListeners( new PostInsertEventListener[]{del} );
+		cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
+		cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
+		cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
 	}
 
 }

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TestCase.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TestCase.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TestCase.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -2,12 +2,18 @@
 package org.hibernate.lucene.test;
 
 import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.store.Directory;
 import org.hibernate.event.PostDeleteEventListener;
 import org.hibernate.event.PostInsertEventListener;
 import org.hibernate.event.PostUpdateEventListener;
 import org.hibernate.lucene.Environment;
 import org.hibernate.lucene.store.RAMDirectoryProvider;
 import org.hibernate.lucene.event.LuceneEventListener;
+import org.hibernate.HibernateException;
+import org.hibernate.SessionFactory;
+import org.hibernate.impl.SessionFactoryImpl;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.SessionFactoryImplementor;
 
 /**
  * @author Emmanuel Bernard
@@ -18,12 +24,30 @@
 		buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
 	}
 
+	protected Directory getDirectory(Class clazz) {
+		return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
+	}
+
+	private LuceneEventListener getLuceneEventListener() {
+        PostInsertEventListener[] listeners = ( (SessionFactoryImpl) getSessions() ).getEventListeners().getPostInsertEventListeners();
+        LuceneEventListener listener = null;
+        //FIXME this sucks since we mandate the event listener use
+        for (PostInsertEventListener candidate : listeners) {
+            if (candidate instanceof LuceneEventListener) {
+                listener = (LuceneEventListener) candidate;
+                break;
+            }
+        }
+        if (listener == null) throw new HibernateException("Lucene event listener not initialized");
+        return listener;
+    }
+
 	protected void configure(org.hibernate.cfg.Configuration cfg) {
 		cfg.setProperty( "hibernate.lucene.default.directory_provider", RAMDirectoryProvider.class.getName() );
 		cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
 		LuceneEventListener del = new LuceneEventListener();
-		cfg.getEventListeners().setPostCommitDeleteEventListeners( new PostDeleteEventListener[]{del} );
-		cfg.getEventListeners().setPostCommitUpdateEventListeners( new PostUpdateEventListener[]{del} );
-		cfg.getEventListeners().setPostCommitInsertEventListeners( new PostInsertEventListener[]{del} );
+		cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
+		cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
+		cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
 	}
 }

Added: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TransactionTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TransactionTest.java	2006-10-31 21:30:15 UTC (rev 10670)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/TransactionTest.java	2006-11-01 04:27:26 UTC (rev 10671)
@@ -0,0 +1,72 @@
+//$Id: $
+package org.hibernate.lucene.test;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.hibernate.Session;
+import org.hibernate.HibernateException;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.lucene.LuceneSession;
+import org.hibernate.lucene.event.LuceneEventListener;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.store.Directory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TransactionTest extends TestCase {
+
+	public void testTransactionCommit() throws Exception {
+		Session s = getSessions().openSession();
+		s.getTransaction().begin();
+		s.persist(
+				new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+		);
+		s.getTransaction().commit();
+		s.close();
+
+		assertEquals( "transaction.commit() should index", 1, getDocumentNumber() );
+
+		s = getSessions().openSession();
+		s.getTransaction().begin();
+		s.persist(
+				new Document( "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah" )
+		);
+		s.flush();
+		s.getTransaction().rollback();
+		s.close();
+
+		assertEquals( "rollback() should not index", 1, getDocumentNumber() );
+
+		s = getSessions().openSession();
+		s.persist(
+				new Document( "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah" )
+		);
+		s.flush();
+		s.close();
+
+		assertEquals( "no transaction should index", 2, getDocumentNumber() );
+
+	}
+
+	private int getDocumentNumber() throws IOException {
+		IndexReader reader = IndexReader.open( getDirectory( Document.class ) );
+		try {
+			return reader.numDocs();
+		}
+		finally {
+			reader.close();
+		}
+	}
+
+
+	protected Class[] getMappings() {
+		return new Class[]{Document.class};
+	}
+}




More information about the hibernate-commits mailing list