Author: epbernard
Date: 2007-01-10 15:13:22 -0500 (Wed, 10 Jan 2007)
New Revision: 11038
Added:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
Log:
ANN-524 share event initialization state by creating a SearchFactory
ANN-519 support multiple queue workers
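In practice the change moves the initialization state that FullTextIndexEventListener used to hold (document builders, per-DirectoryProvider locks, the Worker) into a SearchFactory shared per Configuration, and the backend contracts now take that factory instead of the two maps. The following is a minimal usage sketch of the new entry point, not part of this changeset; it assumes an AnnotationConfiguration whose hibernate.cfg.xml maps at least one @Indexed entity:

    import org.hibernate.cfg.AnnotationConfiguration;
    import org.hibernate.search.SearchFactory;
    import org.hibernate.search.backend.Worker;

    public class SearchFactorySketch {
        public static void main(String[] args) {
            // Assumption: hibernate.cfg.xml is on the classpath and maps @Indexed entities.
            AnnotationConfiguration cfg = new AnnotationConfiguration();
            cfg.configure();

            // One shared SearchFactory per Configuration (ANN-524).
            SearchFactory searchFactory = SearchFactory.getSearchFactory( cfg );

            // State formerly held by the event listener, now reached through the factory.
            Worker worker = searchFactory.getWorker();
            System.out.println( "indexed classes: " + searchFactory.getDocumentBuilders().keySet() );
            System.out.println( "worker backend: " + worker.getClass().getName() );
        }
    }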
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -0,0 +1,138 @@
+//$Id: $
+package org.hibernate.search;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.WeakHashMap;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.search.Environment;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.WorkerFactory;
+import org.hibernate.search.backend.Worker;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.HibernateException;
+import org.hibernate.mapping.PersistentClass;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SearchFactory {
+ private static ThreadLocal<WeakHashMap<Configuration, SearchFactory>> contexts =
+ new ThreadLocal<WeakHashMap<Configuration, SearchFactory>>();
+ static {
+ contexts.set( new WeakHashMap<Configuration, SearchFactory>(2) );
+ }
+ private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+ new HashMap<DirectoryProvider, ReentrantLock>();
+ private Worker worker;
+
+ public SearchFactory(Configuration cfg) {
+ //yuk
+ ReflectionManager reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+ if ( analyzerClassName != null ) {
+ try {
+ analyzerClass = ReflectHelper.classForName( analyzerClassName );
+ }
+ catch (Exception e) {
+ throw new HibernateException(
+ "Lucene analyzer class '" + analyzerClassName + "' defined
in property '" + Environment.ANALYZER_CLASS + "' could not be
found.",
+ e
+ );
+ }
+ }
+ else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer analyzer;
+ try {
+ analyzer = (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new HibernateException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ":
" + analyzerClassName
+ );
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Failed to instantiate lucene analyzer with type
" + analyzerClassName );
+ }
+
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while ( iter.hasNext() ) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if ( mappedClass != null ) {
+ XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+ if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+ if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+ lockableDirectoryProviders.put( provider, new ReentrantLock() );
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, analyzer, provider, reflectionManager
+ );
+
+ documentBuilders.put( mappedClass, documentBuilder );
+ }
+ }
+ }
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for ( DocumentBuilder builder : documentBuilders.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ WorkerFactory workerFactory = new WorkerFactory();
+ workerFactory.configure( cfg, this );
+ worker = workerFactory.createWorker();
+ }
+
+ //code doesn't have to be multithreaded because SF creation is not.
+ public static SearchFactory getSearchFactory(Configuration cfg) {
+ WeakHashMap<Configuration, SearchFactory> contextMap = contexts.get();
+ SearchFactory searchFactory = contextMap.get( cfg );
+ if ( searchFactory == null) {
+ searchFactory = new SearchFactory(cfg);
+
+ contextMap.put( cfg, searchFactory );
+ }
+ return searchFactory;
+ }
+
+
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
+
+ public void setDocumentBuilders(Map<Class, DocumentBuilder<Object>> documentBuilders) {
+ this.documentBuilders = documentBuilders;
+ }
+
+ public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
+ return lockableDirectoryProviders;
+ }
+
+ public void setLockableDirectoryProviders(Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ this.lockableDirectoryProviders = lockableDirectoryProviders;
+ }
+
+ public Worker getWorker() {
+ return worker;
+ }
+}
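A note on the caching above, inferred from the code rather than stated in the log: getSearchFactory(Configuration) hands back one SearchFactory per Configuration instance on the current thread, because the lookup goes through the ThreadLocal<WeakHashMap<Configuration, SearchFactory>> seeded in the static initializer. An illustrative snippet, reusing the cfg from the sketch under the log message:

    // Same Configuration object on the same thread => same cached SearchFactory.
    SearchFactory first = SearchFactory.getSearchFactory( cfg );
    SearchFactory second = SearchFactory.getSearchFactory( cfg );
    assert first == second; // served from the per-thread WeakHashMap keyed by cfg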
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,13 +2,10 @@
package org.hibernate.search.backend;
import java.util.Properties;
-import java.util.Map;
import java.util.List;
-import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.search.backend.Work;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
/**
* Execute the work for a given queue
@@ -18,8 +15,7 @@
public interface QueueWorker extends Runnable {
void run();
- void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders);
+ void initialize(Properties props, SearchFactory searchFactory);
void setQueue(List<Work> queue);
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,14 +2,9 @@
package org.hibernate.search.backend;
import java.util.Properties;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-import org.hibernate.event.AbstractEvent;
import org.hibernate.event.EventSource;
-import org.hibernate.cfg.Configuration;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
/**
* Perform work for a given session. This implementation has to be multi threaded
@@ -18,6 +13,5 @@
public interface Worker {
void performWork(Work work, EventSource session);
- void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders);
+ void initialize(Properties props, SearchFactory searchFactory);
}
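With the contract narrowed as above, a backend implementation only has to keep the SearchFactory handle and pull builders or directory-provider locks from it on demand. A hypothetical Worker written against the new signature (the class name and the println are illustrative only, not part of this commit):

    import java.util.Properties;

    import org.hibernate.event.EventSource;
    import org.hibernate.search.SearchFactory;
    import org.hibernate.search.backend.Work;
    import org.hibernate.search.backend.Worker;

    public class TracingWorker implements Worker {
        private SearchFactory searchFactory;
        private Properties properties;

        public void initialize(Properties props, SearchFactory searchFactory) {
            // builders and directory-provider locks are reached through the factory when needed
            this.properties = props;
            this.searchFactory = searchFactory;
        }

        public void performWork(Work work, EventSource session) {
            System.out.println( "work received; " + searchFactory.getDocumentBuilders().size()
                    + " indexed classes known to the factory" );
        }
    }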
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -8,6 +8,7 @@
import org.hibernate.cfg.Configuration;
import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.Environment;
import org.hibernate.util.StringHelper;
@@ -21,12 +22,11 @@
private Map<Class, DocumentBuilder<Object>> documentBuilders;
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Configuration cfg;
+ private SearchFactory searchFactory;
public void configure(Configuration cfg,
- Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
this.cfg = cfg;
}
@@ -67,7 +67,7 @@
throw new AnnotationException("Unable to instanciate worker class: " + impl, e );
}
}
- worker.initialize( props, documentBuilders, lockableDirectoryProviders );
+ worker.initialize( props, searchFactory );
return worker;
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -15,6 +15,7 @@
import org.hibernate.HibernateException;
import org.hibernate.AssertionFailure;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
/**
@@ -31,26 +32,23 @@
//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene use abstract classes, not interfaces
public class Workspace {
private static Log log = LogFactory.getLog( Workspace.class );
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Map<DirectoryProvider, IndexReader> readers = new HashMap<DirectoryProvider, IndexReader>();
private Map<DirectoryProvider, IndexWriter> writers = new HashMap<DirectoryProvider, IndexWriter>();
private List<DirectoryProvider> lockedProviders = new ArrayList<DirectoryProvider>();
+ private SearchFactory searchFactory;
- public Workspace(Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public Workspace(SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
}
public DocumentBuilder getDocumentBuilder(Class entity) {
- return documentBuilders.get( entity );
+ return searchFactory.getDocumentBuilders().get( entity );
}
public IndexReader getIndexReader(Class entity) {
//TODO NPEs
- DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ DirectoryProvider provider = searchFactory.getDocumentBuilders().get( entity ).getDirectoryProvider();
//one cannot access a reader for update after a writer has been accessed
if ( writers.containsKey( provider ) )
throw new AssertionFailure("Tries to read for update a index while a writer is accessed" + entity);
@@ -68,7 +66,7 @@
}
public IndexWriter getIndexWriter(Class entity) {
- DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ DirectoryProvider provider = searchFactory.getDocumentBuilders().get( entity
).getDirectoryProvider();
//one has to close a reader for update before a writer is accessed
IndexReader reader = readers.get( provider );
if ( reader != null ) {
@@ -85,7 +83,7 @@
lockProvider( provider );
try {
writer = new IndexWriter(
- provider.getDirectory(), documentBuilders.get( entity ).getAnalyzer(), false
+ provider.getDirectory(), searchFactory.getDocumentBuilders().get( entity ).getAnalyzer(), false
); //have been created at init time
writers.put( provider, writer );
}
@@ -97,7 +95,7 @@
private void lockProvider(DirectoryProvider provider) {
//make sure to use a semaphore
- ReentrantLock lock = lockableDirectoryProviders.get( provider );
+ ReentrantLock lock = searchFactory.getLockableDirectoryProviders().get( provider );
//of course a given thread cannot have a race cond with itself
if ( !lock.isHeldByCurrentThread() ) {
lock.lock();
@@ -135,7 +133,7 @@
}
}
for ( DirectoryProvider provider : lockedProviders ) {
- lockableDirectoryProviders.get( provider ).unlock();
+ searchFactory.getLockableDirectoryProviders().get( provider ).unlock();
}
readers.clear();
writers.clear();
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,31 +2,27 @@
package org.hibernate.search.backend.impl;
import java.util.List;
-import java.util.Map;
import java.util.Properties;
import java.util.Collections;
import java.util.Comparator;
-import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.QueueWorker;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
/**
* @author Emmanuel Bernard
*/
-class BatchedQueueWorker implements QueueWorker {
+public class BatchedQueueWorker implements QueueWorker {
private List<Work> queue;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
+ private SearchFactory searchFactory;
public void run() {
Workspace workspace;
LuceneWorker worker;
- workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
+ workspace = new Workspace( searchFactory );
worker = new LuceneWorker( workspace );
try {
deadlockFreeQueue(queue, workspace);
@@ -40,10 +36,8 @@
}
}
- public void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
}
public void setQueue(List<Work> queue) {
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -3,16 +3,13 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.Properties;
-import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ExecutorService;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.UpdateWork;
@@ -29,16 +26,13 @@
public class BatchedWorkQueue implements WorkQueue {
private List<Work> queue = new ArrayList<Work>();
private boolean sync;
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private ExecutorService executorService;
private Properties properties;
+ private SearchFactory searchFactory;
- public BatchedWorkQueue(Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders,
+ public BatchedWorkQueue(SearchFactory searchFactory,
Properties properties) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ this.searchFactory = searchFactory;
this.properties = properties;
//default to sync if none defined
this.sync = ! "async".equalsIgnoreCase( properties.getProperty( Environment.WORKER_PREFIX + "type") );
@@ -69,7 +63,7 @@
//TODO implements parallel batchWorkers (one per Directory)
public void performWork() {
BatchedQueueWorker batchWorker = new BatchedQueueWorker();
- batchWorker.initialize( properties, documentBuilders, lockableDirectoryProviders );
+ batchWorker.initialize( properties, searchFactory );
batchWorker.setQueue( queue );
if (sync) {
batchWorker.run();
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -11,6 +11,7 @@
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.util.WeakIdentityHashMap;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.event.EventSource;
import org.hibernate.Transaction;
@@ -30,6 +31,7 @@
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Map<Class, DocumentBuilder<Object>> documentBuilders;
private Properties properties;
+ private SearchFactory searchFactory;
public void performWork(Work work, EventSource session) {
if ( session.isTransactionInProgress() ) {
@@ -37,7 +39,7 @@
PostTransactionWorkQueueSynchronization txSync =
(PostTransactionWorkQueueSynchronization)
queuePerTransaction.get( transaction );
if ( txSync == null || txSync.isConsumed() ) {
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, properties );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, properties );
txSync = new PostTransactionWorkQueueSynchronization( workQueue, queuePerTransaction );
transaction.registerSynchronization( txSync );
queuePerTransaction.put(transaction, txSync);
@@ -45,17 +47,15 @@
txSync.add( work );
}
else {
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, properties );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, properties );
PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
}
}
- public void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
this.properties = props;
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -26,9 +26,9 @@
import org.hibernate.search.bridge.BridgeFactory;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.TwoWayFieldBridge;
-import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.util.BinderHelper;
+import org.hibernate.search.SearchFactory;
import org.hibernate.reflection.ReflectionManager;
import org.hibernate.reflection.XAnnotatedElement;
import org.hibernate.reflection.XClass;
@@ -305,8 +305,8 @@
}
}
- public static Serializable getDocumentId(FullTextIndexEventListener listener, Class clazz, Document document) {
- DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
+ public static Serializable getDocumentId(SearchFactory searchFactory, Class clazz, Document document) {
+ DocumentBuilder builder = searchFactory.getDocumentBuilders().get( clazz );
if ( builder == null ) throw new HibernateException( "No Lucene configuration set up for: " + clazz.getName() );
return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,19 +2,10 @@
package org.hibernate.search.event;
import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
-import org.hibernate.HibernateException;
-import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.AbstractEvent;
import org.hibernate.event.Initializable;
@@ -24,21 +15,12 @@
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.search.Environment;
-import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.DeleteWork;
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.Worker;
-import org.hibernate.search.backend.WorkerFactory;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.store.DirectoryProviderFactory;
-import org.hibernate.mapping.PersistentClass;
-import org.hibernate.reflection.ReflectionManager;
-import org.hibernate.reflection.XClass;
-import org.hibernate.util.ReflectHelper;
+import org.hibernate.search.SearchFactory;
/**
* This listener supports setting a parent directory for all generated index files.
@@ -52,92 +34,21 @@
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
PostUpdateEventListener, Initializable {
- protected ReflectionManager reflectionManager;
- protected Worker worker;
-
- //FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
- @Deprecated
- public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
- return documentBuilders;
- }
-
-
- private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
- //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
- new HashMap<DirectoryProvider, ReentrantLock>();
- private boolean initialized;
-
private static final Log log = LogFactory.getLog( FullTextIndexEventListener.class );
+ private SearchFactory searchFactory;
+
public void initialize(Configuration cfg) {
- if ( initialized ) return;
- //yuk
- reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+ searchFactory = SearchFactory.getSearchFactory( cfg );
+ }
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
- if ( analyzerClassName != null ) {
- try {
- analyzerClass = ReflectHelper.classForName( analyzerClassName );
- }
- catch (Exception e) {
- throw new HibernateException(
- "Lucene analyzer class '" + analyzerClassName + "' defined
in property '" + Environment.ANALYZER_CLASS + "' could not be
found.",
- e
- );
- }
- }
- else {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer analyzer;
- try {
- analyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new HibernateException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ":
" + analyzerClassName
- );
- }
- catch (Exception e) {
- throw new HibernateException( "Failed to instantiate lucene analyzer with type
" + analyzerClassName );
- }
-
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
- while ( iter.hasNext() ) {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if ( mappedClass != null ) {
- XClass mappedXClass = reflectionManager.toXClass( mappedClass );
- if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
- DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
- if ( !lockableDirectoryProviders.containsKey( provider ) ) {
- lockableDirectoryProviders.put( provider, new ReentrantLock() );
- }
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
- mappedXClass, analyzer, provider, reflectionManager
- );
-
- documentBuilders.put( mappedClass, documentBuilder );
- }
- }
- }
- Set<Class> indexedClasses = documentBuilders.keySet();
- for ( DocumentBuilder builder : documentBuilders.values() ) {
- builder.postInitialize( indexedClasses );
- }
- WorkerFactory workerFactory = new WorkerFactory();
- workerFactory.configure( cfg, documentBuilders, lockableDirectoryProviders );
- worker = workerFactory.createWorker();
- initialized = true;
+ public SearchFactory getSearchFactory() {
+ return searchFactory;
}
public void onPostDelete(PostDeleteEvent event) {
- if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+ if ( searchFactory.getDocumentBuilders().containsKey( event.getEntity().getClass() ) ) {
DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
processWork( work, event );
}
@@ -145,7 +56,7 @@
public void onPostInsert(PostInsertEvent event) {
final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( entity.getClass() );
if ( builder != null ) {
Serializable id = event.getId();
Document doc = builder.getDocument( entity, id );
@@ -156,7 +67,7 @@
public void onPostUpdate(PostUpdateEvent event) {
final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( entity.getClass() );
if ( builder != null ) {
Serializable id = event.getId();
Document doc = builder.getDocument( entity, id );
@@ -166,10 +77,6 @@
}
private void processWork(Work work, AbstractEvent event) {
- worker.performWork( work, event.getSession() );
+ searchFactory.getWorker().performWork( work, event.getSession() );
}
-
- public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
- return lockableDirectoryProviders;
- }
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -8,7 +8,6 @@
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import java.util.concurrent.locks.ReentrantLock;
import javax.transaction.Status;
@@ -25,18 +24,33 @@
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
+import org.hibernate.Interceptor;
+import org.hibernate.ScrollableResults;
+import org.hibernate.ScrollMode;
+import org.hibernate.event.EventListeners;
+import org.hibernate.loader.custom.CustomQuery;
+import org.hibernate.persister.entity.EntityPersister;
+import org.hibernate.jdbc.Batcher;
+import org.hibernate.jdbc.JDBCContext;
+import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.engine.QueryParameters;
+import org.hibernate.engine.EntityKey;
+import org.hibernate.engine.PersistenceContext;
import org.hibernate.impl.SessionImpl;
+import org.hibernate.impl.CriteriaImpl;
import org.hibernate.search.query.FullTextQueryImpl;
-import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.util.ContextHelper;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.impl.BatchedWorkQueue;
import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.FullTextSession;
import org.hibernate.stat.SessionStatistics;
import org.hibernate.type.Type;
@@ -47,7 +61,7 @@
*
* @author Emmanuel Bernard
*/
-public class FullTextSessionImpl implements FullTextSession {
+public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
private final SessionImpl session;
private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
@@ -75,13 +89,13 @@
public void index(Object entity) {
if (entity == null) return;
Class clazz = entity.getClass();
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
- DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( clazz );
if ( builder != null ) {
Serializable id = session.getIdentifier( entity );
Document doc = builder.getDocument( entity, id );
UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
- processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
+ processWork( work, searchFactory );
}
//TODO
//need to add elements in a queue kept at the Session level
@@ -92,11 +106,10 @@
// this is an open discussion
}
- private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ private void processWork(Work work, SearchFactory searchFactory) {
if ( session.isTransactionInProgress() ) {
if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
- postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
+ postTransactionWorkQueueSynch = createWorkQueueSync(searchFactory);
session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
}
postTransactionWorkQueueSynch.add( work );
@@ -104,17 +117,16 @@
else {
//no transaction work right away
PostTransactionWorkQueueSynchronization sync =
- createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
+ createWorkQueueSync( searchFactory );
sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
}
}
private PostTransactionWorkQueueSynchronization createWorkQueueSync(
- Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ SearchFactory searchFactory) {
//FIXME should be harmonized with the WorkerFactory?
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, new Properties() );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, new Properties() );
return new PostTransactionWorkQueueSynchronization( workQueue );
}
@@ -317,6 +329,156 @@
return session.getEnabledFilter( filterName );
}
+ public Interceptor getInterceptor() {
+ return session.getInterceptor();
+ }
+
+ public void setAutoClear(boolean enabled) {
+ session.setAutoClear( enabled );
+ }
+
+ public boolean isTransactionInProgress() {
+ return session.isTransactionInProgress();
+ }
+
+ public void initializeCollection(PersistentCollection collection, boolean writing) throws HibernateException {
+ session.initializeCollection( collection, writing );
+ }
+
+ public Object internalLoad(String entityName, Serializable id, boolean eager, boolean nullable)
+ throws HibernateException {
+ return session.internalLoad( entityName, id, eager, nullable );
+ }
+
+ public Object immediateLoad(String entityName, Serializable id) throws HibernateException {
+ return session.immediateLoad( entityName, id );
+ }
+
+ public long getTimestamp() {
+ return session.getTimestamp();
+ }
+
+ public SessionFactoryImplementor getFactory() {
+ return session.getFactory();
+ }
+
+ public Batcher getBatcher() {
+ return session.getBatcher();
+ }
+
+ public List list(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.list( query, queryParameters );
+ }
+
+ public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.iterate( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.scroll( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) {
+ return session.scroll( criteria, scrollMode );
+ }
+
+ public List list(CriteriaImpl criteria) {
+ return session.list( criteria );
+ }
+
+ public List listFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.listFilter( collection, filter, queryParameters );
+ }
+
+ public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.iterateFilter( collection, filter, queryParameters );
+ }
+
+ public EntityPersister getEntityPersister(String entityName, Object object) throws HibernateException {
+ return session.getEntityPersister( entityName, object );
+ }
+
+ public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException {
+ return session.getEntityUsingInterceptor( key );
+ }
+
+ public void afterTransactionCompletion(boolean successful, Transaction tx) {
+ session.afterTransactionCompletion( successful, tx );
+ }
+
+ public void beforeTransactionCompletion(Transaction tx) {
+ session.beforeTransactionCompletion( tx );
+ }
+
+ public Serializable getContextEntityIdentifier(Object object) {
+ return session.getContextEntityIdentifier( object );
+ }
+
+ public String bestGuessEntityName(Object object) {
+ return session.bestGuessEntityName( object );
+ }
+
+ public String guessEntityName(Object entity) throws HibernateException {
+ return session.guessEntityName( entity );
+ }
+
+ public Object instantiate(String entityName, Serializable id) throws HibernateException {
+ return session.instantiate( entityName, id );
+ }
+
+ public List listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) throws HibernateException {
+ return session.listCustomQuery( customQuery, queryParameters );
+ }
+
+ public ScrollableResults scrollCustomQuery(CustomQuery customQuery, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.scrollCustomQuery( customQuery, queryParameters );
+ }
+
+ public List list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) throws HibernateException {
+ return session.list( spec, queryParameters );
+ }
+
+ public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.scroll( spec, queryParameters );
+ }
+
+ public Object getFilterParameterValue(String filterParameterName) {
+ return session.getFilterParameterValue( filterParameterName );
+ }
+
+ public Type getFilterParameterType(String filterParameterName) {
+ return session.getFilterParameterType( filterParameterName );
+ }
+
+ public Map getEnabledFilters() {
+ return session.getEnabledFilters();
+ }
+
+ public int getDontFlushFromFind() {
+ return session.getDontFlushFromFind();
+ }
+
+ public EventListeners getListeners() {
+ return session.getListeners();
+ }
+
+ public PersistenceContext getPersistenceContext() {
+ return session.getPersistenceContext();
+ }
+
+ public int executeUpdate(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.executeUpdate( query, queryParameters );
+ }
+
+ public int executeNativeUpdate(NativeSQLQuerySpecification specification, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.executeNativeUpdate( specification, queryParameters );
+ }
+
public EntityMode getEntityMode() {
return session.getEntityMode();
}
@@ -337,6 +499,34 @@
return session.getNamedQuery( queryName );
}
+ public Query getNamedSQLQuery(String name) {
+ return session.getNamedSQLQuery( name );
+ }
+
+ public boolean isEventSource() {
+ return session.isEventSource();
+ }
+
+ public void afterScrollOperation() {
+ session.afterScrollOperation();
+ }
+
+ public void setFetchProfile(String name) {
+ session.setFetchProfile( name );
+ }
+
+ public String getFetchProfile() {
+ return session.getFetchProfile();
+ }
+
+ public JDBCContext getJDBCContext() {
+ return session.getJDBCContext();
+ }
+
+ public boolean isClosed() {
+ return session.isClosed();
+ }
+
public org.hibernate.Session getSession(EntityMode entityMode) {
return session.getSession( entityMode );
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -34,7 +34,7 @@
import org.hibernate.engine.query.ParameterMetadata;
import org.hibernate.impl.AbstractQueryImpl;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.util.ContextHelper;
/**
@@ -71,9 +71,9 @@
//user stop using it
//scrollable is better in this area
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
Hits hits = searcher.search( query );
@@ -85,7 +85,7 @@
Document document = hits.doc( index );
EntityInfo entityInfo = new EntityInfo();
entityInfo.clazz = DocumentBuilder.getDocumentClass( document );
- entityInfo.id = DocumentBuilder.getDocumentId( listener, entityInfo.clazz, document );
+ entityInfo.id = DocumentBuilder.getDocumentId( searchFactory, entityInfo.clazz, document );
entityInfos.add( entityInfo );
}
return new IteratorImpl( entityInfos, (Session) this.session );
@@ -105,9 +105,9 @@
public ScrollableResults scroll() throws HibernateException {
//keep the searcher open until the resultset is closed
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );;
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
Hits hits;
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
@@ -115,7 +115,7 @@
setResultSize( hits );
int first = first();
int max = max( first, hits );
- return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session, listener );
+ return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session, searchFactory );
}
catch (IOException e) {
try {
@@ -134,9 +134,9 @@
}
public List list() throws HibernateException {
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );;
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
Hits hits;
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
@@ -149,7 +149,7 @@
for ( int index = first; index <= max; index++ ) {
Document document = hits.doc( index );
Class clazz = DocumentBuilder.getDocumentClass( document );
- Serializable id = DocumentBuilder.getDocumentId( listener, clazz, document );
+ Serializable id = DocumentBuilder.getDocumentId( searchFactory, clazz, document );
result.add( sess.load( clazz, id ) );
//use load to benefit from the batch-size
//we don't face proxy casting issues since the exact class is extracted from the index
@@ -210,8 +210,8 @@
}
//TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
- private Searcher buildSearcher(FullTextIndexEventListener listener) {
- Map<Class, DocumentBuilder<Object>> builders = listener.getDocumentBuilders();
+ private Searcher buildSearcher(SearchFactory searchFactory) {
+ Map<Class, DocumentBuilder<Object>> builders = searchFactory.getDocumentBuilders();
Set<Directory> directories = new HashSet<Directory>();
if ( classes == null || classes.length == 0 ) {
//no class means all classes
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -18,7 +18,7 @@
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
import org.hibernate.type.Type;
/**
@@ -31,11 +31,11 @@
private final int max;
private int current;
private final Session session;
- private final FullTextIndexEventListener listener;
private EntityInfo[] entityInfos;
+ private final SearchFactory searchFactory;
public ScrollableResultsImpl(
- Searcher searcher, Hits hits, int first, int max, Session session, FullTextIndexEventListener listener
+ Searcher searcher, Hits hits, int first, int max, Session session, SearchFactory searchFactory
) {
this.searcher = searcher;
this.hits = hits;
@@ -43,7 +43,7 @@
this.max = max;
this.current = first;
this.session = session;
- this.listener = listener;
+ this.searchFactory = searchFactory;
entityInfos = new EntityInfo[max - first + 1];
}
@@ -109,7 +109,7 @@
}
info.clazz = DocumentBuilder.getDocumentClass( document );
//FIXME should check that clazz match classes but this complexify a lot the firstResult/maxResult
- info.id = DocumentBuilder.getDocumentId( listener, info.clazz, document );
+ info.id = DocumentBuilder.getDocumentId( searchFactory, info.clazz, document );
entityInfos[current - first] = info;
}
return new Object[]{
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -5,13 +5,14 @@
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
/**
* @author Emmanuel Bernard
*/
public abstract class ContextHelper {
- public static FullTextIndexEventListener getLuceneEventListener(SessionImplementor session) {
+ public static SearchFactory getSearchFactory(SessionImplementor session) {
PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
FullTextIndexEventListener listener = null;
//FIXME this sucks since we mandante the event listener use
@@ -22,6 +23,6 @@
}
}
if ( listener == null ) throw new HibernateException( "Lucene event listener not initialized" );
- return listener;
+ return listener.getSearchFactory();
}
}
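For completeness, a short sketch of the session-scoped lookup path that FullTextSessionImpl and FullTextQueryImpl now rely on; it assumes the event listeners are wired as in the TestCase change below, and sessionFactory stands for an already-built SessionFactory:

    // Sketch only: resolving the shared SearchFactory from an open Session.
    SessionImplementor session = (SessionImplementor) sessionFactory.openSession();
    SearchFactory searchFactory = ContextHelper.getSearchFactory( session );
    Worker worker = searchFactory.getWorker();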
Modified: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -22,7 +22,7 @@
}
protected Directory getDirectory(Class clazz) {
- return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
+ return getLuceneEventListener().getSearchFactory().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
}
private FullTextIndexEventListener getLuceneEventListener() {
@@ -42,9 +42,9 @@
protected void configure(org.hibernate.cfg.Configuration cfg) {
cfg.setProperty( "hibernate.search.default.directory_provider",
RAMDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
- FullTextIndexEventListener del = new FullTextIndexEventListener();
- cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
- cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
- cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
+ //FullTextIndexEventListener del = new FullTextIndexEventListener();
+ cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{ new FullTextIndexEventListener() } );
+ cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{ new FullTextIndexEventListener() } );
+ cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{ new FullTextIndexEventListener() } );
}
}