[hibernate-commits] Hibernate SVN: r10865 - in branches/Lucene_Integration/HibernateExt/metadata/src: java/org/hibernate/search java/org/hibernate/search/bridge java/org/hibernate/search/engine java/org/hibernate/search/event java/org/hibernate/search/impl java/org/hibernate/search/query java/org/hibernate/search/util test/org/hibernate/search/test test/org/hibernate/search/test/bridge test/org/hibernate/search/test/fieldAccess test/org/hibernate/search/test/inheritance test/org/hibernate/search/test/query test/org/hibernate/search/test/session

hibernate-commits at lists.jboss.org hibernate-commits at lists.jboss.org
Thu Nov 23 17:30:29 EST 2006


Author: epbernard
Date: 2006-11-23 17:30:01 -0500 (Thu, 23 Nov 2006)
New Revision: 10865

Added:
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
Removed:
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java
Modified:
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
   branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
   branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
Log:
Finish getting rid of Lucene when it is inappropriate.
Polish renaming

Deleted: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -1,465 +0,0 @@
-//$Id: $
-package org.hibernate.search;
-
-import java.io.Serializable;
-import java.sql.Connection;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-
-import javax.transaction.Status;
-
-import org.hibernate.CacheMode;
-import org.hibernate.Criteria;
-import org.hibernate.EntityMode;
-import org.hibernate.Filter;
-import org.hibernate.FlushMode;
-import org.hibernate.HibernateException;
-import org.hibernate.LockMode;
-import org.hibernate.Query;
-import org.hibernate.ReplicationMode;
-import org.hibernate.SQLQuery;
-import org.hibernate.Session;
-import org.hibernate.SessionFactory;
-import org.hibernate.Transaction;
-import org.hibernate.engine.query.ParameterMetadata;
-import org.hibernate.impl.SessionImpl;
-import org.hibernate.search.query.FullTextQueryImpl;
-import org.hibernate.search.event.FullTextEventListener;
-import org.hibernate.search.util.ContextHelper;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.backend.UpdateWork;
-import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
-import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.stat.SessionStatistics;
-import org.hibernate.type.Type;
-import org.apache.lucene.document.Document;
-
-/**
- * Lucene aware session that allows lucene query creations
- *
- * @author Emmanuel Bernard
- */
-public class FullTextSession implements Session {
-	private final SessionImpl session;
-	private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
-
-	public FullTextSession(Session session) {
-		this.session = (SessionImpl) session;
-	}
-
-	/**
-	 * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses)
-     * If entities is empty, include all indexed entities
-     * 
-	 * @param entities must be immutable for the lifetime of the query object
-	 */
-	public Query createLuceneQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
-		return new FullTextQueryImpl( luceneQuery, entities, session, new ParameterMetadata(null, null) );
-	}
-
-	/**
-	 * (re)index an entity.
-	 * Non indexable entities are ignored
-	 * The entity must be associated with the session
-	 *
-	 * @param entity must not be null
-	 */
-	public void index(Object entity) {
-		if (entity == null) return;
-		Class clazz = entity.getClass();
-		FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );
-		DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
-		if ( builder != null ) {
-			Serializable id = session.getIdentifier( entity );
-			Document doc = builder.getDocument( entity, id );
-			UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
-			processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
-		}
-		//TODO
-		//need to add elements in a queue kept at the Session level
-		//the queue will be processed by a Lucene(Auto)FlushEventListener
-		//note that we could keep this queue somewhere in the event listener in the mean time but that requires
-		// a synchronized hashmap holding this queue on a per session basis plus some session house keeping (yuk)
-		//an other solution would be to subclass SessionImpl instead of having this LuceneSession delecation model
-		// this is an open discussion
-	}
-
-	private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
-							 Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
-		if ( session.isTransactionInProgress() ) {
-			if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
-				postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
-				session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
-			}
-			postTransactionWorkQueueSynch.add( work );
-		}
-		else {
-			//no transaction work right away
-			PostTransactionWorkQueueSynchronization sync =
-					createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
-			sync.add( work );
-			sync.afterCompletion( Status.STATUS_COMMITTED );
-		}
-	}
-
-	private PostTransactionWorkQueueSynchronization createWorkQueueSync(
-			Map<Class, DocumentBuilder<Object>> documentBuilders,
-			Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
-		WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
-		return new PostTransactionWorkQueueSynchronization( workQueue );
-	}
-
-	public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
-		return session.createSQLQuery( sql, returnAlias, returnClass );
-	}
-
-	public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
-		return session.createSQLQuery( sql, returnAliases, returnClasses );
-	}
-
-	public int delete(String query) throws HibernateException {
-		return session.delete( query );
-	}
-
-	public int delete(String query, Object value, Type type) throws HibernateException {
-		return session.delete( query, value, type );
-	}
-
-	public int delete(String query, Object[] values, Type[] types) throws HibernateException {
-		return session.delete( query, values, types );
-	}
-
-	public Collection filter(Object collection, String filter) throws HibernateException {
-		return session.filter( collection, filter );
-	}
-
-	public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
-		return session.filter( collection, filter, value, type );
-	}
-
-	public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException {
-		return session.filter( collection, filter, values, types );
-	}
-
-	public List find(String query) throws HibernateException {
-		return session.find( query );
-	}
-
-	public List find(String query, Object value, Type type) throws HibernateException {
-		return session.find( query, value, type );
-	}
-
-	public List find(String query, Object[] values, Type[] types) throws HibernateException {
-		return session.find( query, values, types );
-	}
-
-	public Iterator iterate(String query) throws HibernateException {
-		return session.iterate( query );
-	}
-
-	public Iterator iterate(String query, Object value, Type type) throws HibernateException {
-		return session.iterate( query, value, type );
-	}
-
-	public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
-		return session.iterate( query, values, types );
-	}
-
-	public void save(String entityName, Object object, Serializable id) throws HibernateException {
-		session.save( entityName, object, id );
-	}
-
-	public void save(Object object, Serializable id) throws HibernateException {
-		session.save( object, id );
-	}
-
-	public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
-		return session.saveOrUpdateCopy( entityName, object );
-	}
-
-	public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
-		return session.saveOrUpdateCopy( entityName, object, id );
-	}
-
-	public Object saveOrUpdateCopy(Object object) throws HibernateException {
-		return session.saveOrUpdateCopy( object );
-	}
-
-	public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
-		return session.saveOrUpdateCopy( object, id );
-	}
-
-	public void update(String entityName, Object object, Serializable id) throws HibernateException {
-		session.update( entityName, object, id );
-	}
-
-	public void update(Object object, Serializable id) throws HibernateException {
-		session.update( object, id );
-	}
-
-	public Transaction beginTransaction() throws HibernateException {
-		return session.beginTransaction();
-	}
-
-	public void cancelQuery() throws HibernateException {
-		session.cancelQuery();
-	}
-
-	public void clear() {
-		session.clear();
-	}
-
-	public Connection close() throws HibernateException {
-		return session.close();
-	}
-
-	public Connection connection() throws HibernateException {
-		return session.connection();
-	}
-
-	public boolean contains(Object object) {
-		return session.contains( object );
-	}
-
-	public Criteria createCriteria(String entityName) {
-		return session.createCriteria( entityName );
-	}
-
-	public Criteria createCriteria(String entityName, String alias) {
-		return session.createCriteria( entityName, alias );
-	}
-
-	public Criteria createCriteria(Class persistentClass) {
-		return session.createCriteria( persistentClass );
-	}
-
-	public Criteria createCriteria(Class persistentClass, String alias) {
-		return session.createCriteria( persistentClass, alias );
-	}
-
-	public Query createFilter(Object collection, String queryString) throws HibernateException {
-		return session.createFilter( collection, queryString );
-	}
-
-	public Query createQuery(String queryString) throws HibernateException {
-		return session.createQuery( queryString );
-	}
-
-	public SQLQuery createSQLQuery(String queryString) throws HibernateException {
-		return session.createSQLQuery( queryString );
-	}
-
-	public void delete(String entityName, Object object) throws HibernateException {
-		session.delete( entityName, object );
-	}
-
-	public void delete(Object object) throws HibernateException {
-		session.delete( object );
-	}
-
-	public void disableFilter(String filterName) {
-		session.disableFilter( filterName );
-	}
-
-	public Connection disconnect() throws HibernateException {
-		return session.disconnect();
-	}
-
-	public Filter enableFilter(String filterName) {
-		return session.enableFilter( filterName );
-	}
-
-	public void evict(Object object) throws HibernateException {
-		session.evict( object );
-	}
-
-	public void flush() throws HibernateException {
-		session.flush();
-	}
-
-	public Object get(Class clazz, Serializable id) throws HibernateException {
-		return session.get( clazz, id );
-	}
-
-	public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
-		return session.get( clazz, id, lockMode );
-	}
-
-	public Object get(String entityName, Serializable id) throws HibernateException {
-		return session.get( entityName, id );
-	}
-
-	public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
-		return session.get( entityName, id, lockMode );
-	}
-
-	public CacheMode getCacheMode() {
-		return session.getCacheMode();
-	}
-
-	public LockMode getCurrentLockMode(Object object) throws HibernateException {
-		return session.getCurrentLockMode( object );
-	}
-
-	public Filter getEnabledFilter(String filterName) {
-		return session.getEnabledFilter( filterName );
-	}
-
-	public EntityMode getEntityMode() {
-		return session.getEntityMode();
-	}
-
-	public String getEntityName(Object object) throws HibernateException {
-		return session.getEntityName( object );
-	}
-
-	public FlushMode getFlushMode() {
-		return session.getFlushMode();
-	}
-
-	public Serializable getIdentifier(Object object) throws HibernateException {
-		return session.getIdentifier( object );
-	}
-
-	public Query getNamedQuery(String queryName) throws HibernateException {
-		return session.getNamedQuery( queryName );
-	}
-
-	public org.hibernate.Session getSession(EntityMode entityMode) {
-		return session.getSession( entityMode );
-	}
-
-	public SessionFactory getSessionFactory() {
-		return session.getSessionFactory();
-	}
-
-	public SessionStatistics getStatistics() {
-		return session.getStatistics();
-	}
-
-	public Transaction getTransaction() {
-		return session.getTransaction();
-	}
-
-	public boolean isConnected() {
-		return session.isConnected();
-	}
-
-	public boolean isDirty() throws HibernateException {
-		return session.isDirty();
-	}
-
-	public boolean isOpen() {
-		return session.isOpen();
-	}
-
-	public Object load(String entityName, Serializable id) throws HibernateException {
-		return session.load( entityName, id );
-	}
-
-	public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
-		return session.load( entityName, id, lockMode );
-	}
-
-	public void load(Object object, Serializable id) throws HibernateException {
-		session.load( object, id );
-	}
-
-	public Object load(Class theClass, Serializable id) throws HibernateException {
-		return session.load( theClass, id );
-	}
-
-	public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
-		return session.load( theClass, id, lockMode );
-	}
-
-	public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
-		session.lock( entityName, object, lockMode );
-	}
-
-	public void lock(Object object, LockMode lockMode) throws HibernateException {
-		session.lock( object, lockMode );
-	}
-
-	public Object merge(String entityName, Object object) throws HibernateException {
-		return session.merge( entityName, object );
-	}
-
-	public Object merge(Object object) throws HibernateException {
-		return session.merge( object );
-	}
-
-	public void persist(String entityName, Object object) throws HibernateException {
-		session.persist( entityName, object );
-	}
-
-	public void persist(Object object) throws HibernateException {
-		session.persist( object );
-	}
-
-	public void reconnect() throws HibernateException {
-		session.reconnect();
-	}
-
-	public void reconnect(Connection connection) throws HibernateException {
-		session.reconnect( connection );
-	}
-
-	public void refresh(Object object) throws HibernateException {
-		session.refresh( object );
-	}
-
-	public void refresh(Object object, LockMode lockMode) throws HibernateException {
-		session.refresh( object, lockMode );
-	}
-
-	public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
-		session.replicate( entityName, object, replicationMode );
-	}
-
-	public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
-		session.replicate( object, replicationMode );
-	}
-
-	public Serializable save(String entityName, Object object) throws HibernateException {
-		return session.save( entityName, object );
-	}
-
-	public Serializable save(Object object) throws HibernateException {
-		return session.save( object );
-	}
-
-	public void saveOrUpdate(String entityName, Object object) throws HibernateException {
-		session.saveOrUpdate( entityName, object );
-	}
-
-	public void saveOrUpdate(Object object) throws HibernateException {
-		session.saveOrUpdate( object );
-	}
-
-	public void setCacheMode(CacheMode cacheMode) {
-		session.setCacheMode( cacheMode );
-	}
-
-	public void setFlushMode(FlushMode flushMode) {
-		session.setFlushMode( flushMode );
-	}
-
-	public void setReadOnly(Object entity, boolean readOnly) {
-		session.setReadOnly( entity, readOnly );
-	}
-
-	public void update(String entityName, Object object) throws HibernateException {
-		session.update( entityName, object );
-	}
-
-	public void update(Object object) throws HibernateException {
-		session.update( object );
-	}
-}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,25 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.classic.Session;
+import org.hibernate.Query;
+
+/**
+ * Extends the Hibernate {@link Session} with Full text search and indexing capabilities
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextSession extends Session {
+	/**
+	 * Create a Query on top of a native Lucene Query returning the matching objects
+	 * of type <code>entities</code> and their respective subclasses.
+	 * If no entity is provided, no type filtering is done.
+	 */
+	Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities);
+
+	/**
+	 * Force the (re)indexing of a given <b>managed</b> object.
+	 * Indexing is batched per transaction.
+	 */
+	void index(Object entity);
+}

Added: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/Search.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,19 @@
+//$Id: $
+package org.hibernate.search;
+
+import org.hibernate.Session;
+import org.hibernate.search.impl.FullTextSessionImpl;
+
+/**
+ * Helper class to get a FullTextSession out of a regular session
+ * @author Emmanuel Bernard
+ */
+public final class Search {
+
+	private Search() {
+	}
+
+	public static FullTextSession createFullTextSession(Session session) {
+		return new FullTextSessionImpl(session);
+	}
+}
\ No newline at end of file
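
A minimal usage sketch of the new API added above, assuming an existing SessionFactory and a hypothetical @Indexed entity class Book with an indexed "title" field (neither Book nor the field is part of this commit):

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class SearchUsageSketch {
	@SuppressWarnings("unchecked")
	public static List<Book> findByTitle(SessionFactory sf, String title) {
		Session session = sf.openSession();
		try {
			// wrap the plain Session with the full text API introduced in this revision
			FullTextSession fts = Search.createFullTextSession( session );
			org.apache.lucene.search.Query luceneQuery = new TermQuery( new Term( "title", title ) );
			// restrict the matches to Book and its indexed subclasses
			return fts.createFullTextQuery( luceneQuery, Book.class ).list();
		}
		finally {
			session.close();
		}
	}
}

Passing no class at all skips the type filtering, as documented on the interface.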

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/bridge/FieldBridge.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -17,7 +17,7 @@
 	 * Manipulate the document to index the given value.
 	 * A common implementation is to add a Field <code>name</code> to the given document following
 	 * the parameters (<code>store</code>, <code>index</code>, <code>boost</code>) if the
-	 * <code>value></code> is not null
+	 * <code>value</code> is not null
 	 */
 	void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost);
 }
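
To illustrate the set() contract documented in this hunk, a custom bridge that zero-pads integers before indexing might look like the sketch below (hypothetical; the PaddedIntegerBridge test added in this revision plays a similar role, but its actual code may differ):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.hibernate.search.bridge.FieldBridge;

public class ZeroPaddedIntegerBridge implements FieldBridge {
	public void set(String name, Object value, Document document,
					Field.Store store, Field.Index index, Float boost) {
		// per the contract above, a null value adds no field to the document
		if ( value == null ) return;
		String padded = String.format( "%010d", (Integer) value );
		Field field = new Field( name, padded, store, index );
		if ( boost != null ) field.setBoost( boost );
		document.add( field );
	}
}

Zero-padding keeps lexicographic ordering consistent with numeric ordering, which is what makes such a bridge useful for range queries.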

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -26,7 +26,7 @@
 import org.hibernate.search.bridge.BridgeFactory;
 import org.hibernate.search.bridge.FieldBridge;
 import org.hibernate.search.bridge.TwoWayFieldBridge;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 import org.hibernate.search.store.DirectoryProvider;
 import org.hibernate.search.util.BinderHelper;
 import org.hibernate.reflection.ReflectionManager;
@@ -305,7 +305,7 @@
 		}
 	}
 
-	public static Serializable getDocumentId(FullTextEventListener listener, Class clazz, Document document) {
+	public static Serializable getDocumentId(FullTextIndexEventListener listener, Class clazz, Document document) {
 		DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
 		if ( builder == null ) throw new HibernateException( "No Lucene configuration set up for: " + clazz.getName() );
 		return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );

Deleted: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -1,180 +0,0 @@
-//$Id$
-package org.hibernate.search.event;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantLock;
-import javax.transaction.Status;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.hibernate.HibernateException;
-import org.hibernate.cfg.AnnotationConfiguration;
-import org.hibernate.cfg.Configuration;
-import org.hibernate.event.AbstractEvent;
-import org.hibernate.event.Initializable;
-import org.hibernate.event.PostDeleteEvent;
-import org.hibernate.event.PostDeleteEventListener;
-import org.hibernate.event.PostInsertEvent;
-import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.event.PostUpdateEvent;
-import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.search.Environment;
-import org.hibernate.search.annotations.Indexed;
-import org.hibernate.search.backend.AddWork;
-import org.hibernate.search.backend.DeleteWork;
-import org.hibernate.search.backend.UpdateWork;
-import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.WorkQueue;
-import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
-import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.store.DirectoryProviderFactory;
-import org.hibernate.mapping.PersistentClass;
-import org.hibernate.reflection.ReflectionManager;
-import org.hibernate.reflection.XClass;
-import org.hibernate.util.ReflectHelper;
-
-/**
- * This listener supports setting a parent directory for all generated index files.
- * It also supports setting the analyzer class to be used.
- *
- * @author Gavin King
- * @author Emmanuel Bernard
- * @author Mattias Arbin
- */
-//TODO work on sharing the same indexWriters and readers across a single post operation...
-//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
-public class FullTextEventListener implements PostDeleteEventListener, PostInsertEventListener,
-		PostUpdateEventListener, Initializable {
-	ReflectionManager reflectionManager;
-
-	//FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
-	@Deprecated
-	public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
-		return documentBuilders;
-	}
-
-
-	private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
-	//keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
-	private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
-			new HashMap<DirectoryProvider, ReentrantLock>();
-	private boolean initialized;
-
-	private static final Log log = LogFactory.getLog( FullTextEventListener.class );
-
-	public void initialize(Configuration cfg) {
-		if ( initialized ) return;
-		//yuk
-		reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
-
-		Class analyzerClass;
-		String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
-		if ( analyzerClassName != null ) {
-			try {
-				analyzerClass = ReflectHelper.classForName( analyzerClassName );
-			}
-			catch (Exception e) {
-				throw new HibernateException(
-						"Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
-						e
-				);
-			}
-		}
-		else {
-			analyzerClass = StandardAnalyzer.class;
-		}
-		// Initialize analyzer
-		Analyzer analyzer;
-		try {
-			analyzer = (Analyzer) analyzerClass.newInstance();
-		}
-		catch (ClassCastException e) {
-			throw new HibernateException(
-					"Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
-			);
-		}
-		catch (Exception e) {
-			throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
-		}
-
-		Iterator iter = cfg.getClassMappings();
-		DirectoryProviderFactory factory = new DirectoryProviderFactory();
-		while ( iter.hasNext() ) {
-			PersistentClass clazz = (PersistentClass) iter.next();
-			Class<?> mappedClass = clazz.getMappedClass();
-			if ( mappedClass != null ) {
-				XClass mappedXClass = reflectionManager.toXClass( mappedClass );
-				if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
-					DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
-					if ( !lockableDirectoryProviders.containsKey( provider ) ) {
-						lockableDirectoryProviders.put( provider, new ReentrantLock() );
-					}
-					final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
-							mappedXClass, analyzer, provider, reflectionManager
-					);
-
-					documentBuilders.put( mappedClass, documentBuilder );
-				}
-			}
-		}
-		Set<Class> indexedClasses = documentBuilders.keySet();
-		for ( DocumentBuilder builder : documentBuilders.values() ) {
-			builder.postInitialize( indexedClasses );
-		}
-		initialized = true;
-	}
-
-	public void onPostDelete(PostDeleteEvent event) {
-		if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
-			DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
-			processWork( work, event );
-		}
-	}
-
-	public void onPostInsert(PostInsertEvent event) {
-		final Object entity = event.getEntity();
-		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
-		if ( builder != null ) {
-			Serializable id = event.getId();
-			Document doc = builder.getDocument( entity, id );
-			AddWork work = new AddWork( id, entity.getClass(), doc );
-			processWork( work, event );
-		}
-	}
-
-	public void onPostUpdate(PostUpdateEvent event) {
-		final Object entity = event.getEntity();
-		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
-		if ( builder != null ) {
-			Serializable id = event.getId();
-			Document doc = builder.getDocument( entity, id );
-			UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
-			processWork( work, event );
-		}
-	}
-
-	private void processWork(Work work, AbstractEvent event) {
-		WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
-		workQueue.add( work );
-		PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
-		if ( event.getSession().isTransactionInProgress() ) {
-			event.getSession().getTransaction().registerSynchronization( sync );
-		}
-		else {
-			sync.afterCompletion( Status.STATUS_COMMITTED );
-		}
-	}
-
-	public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
-		return lockableDirectoryProviders;
-	}
-}

Copied: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java (from rev 10743, branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java)
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextEventListener.java	2006-11-07 01:16:53 UTC (rev 10743)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,180 @@
+//$Id$
+package org.hibernate.search.event;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+import javax.transaction.Status;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.event.AbstractEvent;
+import org.hibernate.event.Initializable;
+import org.hibernate.event.PostDeleteEvent;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEvent;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEvent;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.backend.AddWork;
+import org.hibernate.search.backend.DeleteWork;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * This listener supports setting a parent directory for all generated index files.
+ * It also supports setting the analyzer class to be used.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Mattias Arbin
+ */
+//TODO work on sharing the same indexWriters and readers across a single post operation...
+//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
+public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
+		PostUpdateEventListener, Initializable {
+	ReflectionManager reflectionManager;
+
+	//FIXME keeping this here is a bad decision since you might want to search indexes without maintaining it
+	@Deprecated
+	public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+		return documentBuilders;
+	}
+
+
+	private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+	//keep track of the index modifiers per DirectoryProvider since multiple entities can use the same directory provider
+	private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+			new HashMap<DirectoryProvider, ReentrantLock>();
+	private boolean initialized;
+
+	private static final Log log = LogFactory.getLog( FullTextIndexEventListener.class );
+
+	public void initialize(Configuration cfg) {
+		if ( initialized ) return;
+		//yuk
+		reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+
+		Class analyzerClass;
+		String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+		if ( analyzerClassName != null ) {
+			try {
+				analyzerClass = ReflectHelper.classForName( analyzerClassName );
+			}
+			catch (Exception e) {
+				throw new HibernateException(
+						"Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
+						e
+				);
+			}
+		}
+		else {
+			analyzerClass = StandardAnalyzer.class;
+		}
+		// Initialize analyzer
+		Analyzer analyzer;
+		try {
+			analyzer = (Analyzer) analyzerClass.newInstance();
+		}
+		catch (ClassCastException e) {
+			throw new HibernateException(
+					"Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
+			);
+		}
+		catch (Exception e) {
+			throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
+		}
+
+		Iterator iter = cfg.getClassMappings();
+		DirectoryProviderFactory factory = new DirectoryProviderFactory();
+		while ( iter.hasNext() ) {
+			PersistentClass clazz = (PersistentClass) iter.next();
+			Class<?> mappedClass = clazz.getMappedClass();
+			if ( mappedClass != null ) {
+				XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+				if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+					DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+					if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+						lockableDirectoryProviders.put( provider, new ReentrantLock() );
+					}
+					final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+							mappedXClass, analyzer, provider, reflectionManager
+					);
+
+					documentBuilders.put( mappedClass, documentBuilder );
+				}
+			}
+		}
+		Set<Class> indexedClasses = documentBuilders.keySet();
+		for ( DocumentBuilder builder : documentBuilders.values() ) {
+			builder.postInitialize( indexedClasses );
+		}
+		initialized = true;
+	}
+
+	public void onPostDelete(PostDeleteEvent event) {
+		if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+			DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
+			processWork( work, event );
+		}
+	}
+
+	public void onPostInsert(PostInsertEvent event) {
+		final Object entity = event.getEntity();
+		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+		if ( builder != null ) {
+			Serializable id = event.getId();
+			Document doc = builder.getDocument( entity, id );
+			AddWork work = new AddWork( id, entity.getClass(), doc );
+			processWork( work, event );
+		}
+	}
+
+	public void onPostUpdate(PostUpdateEvent event) {
+		final Object entity = event.getEntity();
+		DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+		if ( builder != null ) {
+			Serializable id = event.getId();
+			Document doc = builder.getDocument( entity, id );
+			UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+			processWork( work, event );
+		}
+	}
+
+	private void processWork(Work work, AbstractEvent event) {
+		WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+		workQueue.add( work );
+		PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
+		if ( event.getSession().isTransactionInProgress() ) {
+			event.getSession().getTransaction().registerSynchronization( sync );
+		}
+		else {
+			sync.afterCompletion( Status.STATUS_COMMITTED );
+		}
+	}
+
+	public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
+		return lockableDirectoryProviders;
+	}
+}


Property changes on: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:keywords
   + Author Date Id Revision
Name: svn:eol-style
   + native
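
For context, a bootstrap sketch wiring the renamed listener is shown below. It assumes Hibernate's Configuration.setListener(String, Object) API and hibernate-annotations' AnnotationConfiguration, plus a hypothetical @Indexed entity class Book; the property line simply mirrors the Environment.ANALYZER_CLASS lookup performed in initialize() above:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.search.Environment;
import org.hibernate.search.event.FullTextIndexEventListener;

public class SearchBootstrapSketch {
	public static SessionFactory configure() {
		AnnotationConfiguration cfg = new AnnotationConfiguration();
		cfg.addAnnotatedClass( Book.class ); // hypothetical @Indexed entity
		// read by initialize(); StandardAnalyzer is also the default when the property is absent
		cfg.setProperty( Environment.ANALYZER_CLASS, StandardAnalyzer.class.getName() );
		// a single listener instance handles the three events it implements
		FullTextIndexEventListener listener = new FullTextIndexEventListener();
		cfg.setListener( "post-insert", listener );
		cfg.setListener( "post-update", listener );
		cfg.setListener( "post-delete", listener );
		return cfg.buildSessionFactory();
	}
}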

Copied: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java (from rev 10743, branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java)
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/FullTextSession.java	2006-11-07 01:16:53 UTC (rev 10743)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,466 @@
+//$Id: $
+package org.hibernate.search.impl;
+
+import java.io.Serializable;
+import java.sql.Connection;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.transaction.Status;
+
+import org.hibernate.CacheMode;
+import org.hibernate.Criteria;
+import org.hibernate.EntityMode;
+import org.hibernate.Filter;
+import org.hibernate.FlushMode;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ReplicationMode;
+import org.hibernate.SQLQuery;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.impl.SessionImpl;
+import org.hibernate.search.query.FullTextQueryImpl;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.util.ContextHelper;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.UpdateWork;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.impl.BatchLuceneWorkQueue;
+import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.stat.SessionStatistics;
+import org.hibernate.type.Type;
+import org.apache.lucene.document.Document;
+
+/**
+ * Lucene Full text search aware session
+ *
+ * @author Emmanuel Bernard
+ */
+public class FullTextSessionImpl implements FullTextSession {
+	private final SessionImpl session;
+	private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
+
+	public FullTextSessionImpl(Session session) {
+		this.session = (SessionImpl) session;
+	}
+
+	/**
+	 * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses)
+     * If entities is empty, include all indexed entities
+     * 
+	 * @param entities must be immutable for the lifetime of the query object
+	 */
+	public Query createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
+		return new FullTextQueryImpl( luceneQuery, entities, session, new ParameterMetadata(null, null) );
+	}
+
+	/**
+	 * (re)index an entity.
+	 * Non indexable entities are ignored
+	 * The entity must be associated with the session
+	 *
+	 * @param entity must not be null
+	 */
+	public void index(Object entity) {
+		if (entity == null) return;
+		Class clazz = entity.getClass();
+		FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+		DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
+		if ( builder != null ) {
+			Serializable id = session.getIdentifier( entity );
+			Document doc = builder.getDocument( entity, id );
+			UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
+			processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
+		}
+		//TODO
+		//need to add elements in a queue kept at the Session level
+		//the queue will be processed by a Lucene(Auto)FlushEventListener
+		//note that we could keep this queue somewhere in the event listener in the meantime but that requires
+		// a synchronized hashmap holding this queue on a per-session basis plus some session housekeeping (yuk)
+		//another solution would be to subclass SessionImpl instead of having this FullTextSession delegation model
+		// this is an open discussion
+	}
+
+	private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
+							 Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+		if ( session.isTransactionInProgress() ) {
+			if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
+				postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
+				session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
+			}
+			postTransactionWorkQueueSynch.add( work );
+		}
+		else {
+			//no transaction work right away
+			PostTransactionWorkQueueSynchronization sync =
+					createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
+			sync.add( work );
+			sync.afterCompletion( Status.STATUS_COMMITTED );
+		}
+	}
+
+	private PostTransactionWorkQueueSynchronization createWorkQueueSync(
+			Map<Class, DocumentBuilder<Object>> documentBuilders,
+			Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+		WorkQueue workQueue = new BatchLuceneWorkQueue( documentBuilders, lockableDirectoryProviders );
+		return new PostTransactionWorkQueueSynchronization( workQueue );
+	}
+
+	public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
+		return session.createSQLQuery( sql, returnAlias, returnClass );
+	}
+
+	public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
+		return session.createSQLQuery( sql, returnAliases, returnClasses );
+	}
+
+	public int delete(String query) throws HibernateException {
+		return session.delete( query );
+	}
+
+	public int delete(String query, Object value, Type type) throws HibernateException {
+		return session.delete( query, value, type );
+	}
+
+	public int delete(String query, Object[] values, Type[] types) throws HibernateException {
+		return session.delete( query, values, types );
+	}
+
+	public Collection filter(Object collection, String filter) throws HibernateException {
+		return session.filter( collection, filter );
+	}
+
+	public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
+		return session.filter( collection, filter, value, type );
+	}
+
+	public Collection filter(Object collection, String filter, Object[] values, Type[] types) throws HibernateException {
+		return session.filter( collection, filter, values, types );
+	}
+
+	public List find(String query) throws HibernateException {
+		return session.find( query );
+	}
+
+	public List find(String query, Object value, Type type) throws HibernateException {
+		return session.find( query, value, type );
+	}
+
+	public List find(String query, Object[] values, Type[] types) throws HibernateException {
+		return session.find( query, values, types );
+	}
+
+	public Iterator iterate(String query) throws HibernateException {
+		return session.iterate( query );
+	}
+
+	public Iterator iterate(String query, Object value, Type type) throws HibernateException {
+		return session.iterate( query, value, type );
+	}
+
+	public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
+		return session.iterate( query, values, types );
+	}
+
+	public void save(String entityName, Object object, Serializable id) throws HibernateException {
+		session.save( entityName, object, id );
+	}
+
+	public void save(Object object, Serializable id) throws HibernateException {
+		session.save( object, id );
+	}
+
+	public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
+		return session.saveOrUpdateCopy( entityName, object );
+	}
+
+	public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
+		return session.saveOrUpdateCopy( entityName, object, id );
+	}
+
+	public Object saveOrUpdateCopy(Object object) throws HibernateException {
+		return session.saveOrUpdateCopy( object );
+	}
+
+	public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
+		return session.saveOrUpdateCopy( object, id );
+	}
+
+	public void update(String entityName, Object object, Serializable id) throws HibernateException {
+		session.update( entityName, object, id );
+	}
+
+	public void update(Object object, Serializable id) throws HibernateException {
+		session.update( object, id );
+	}
+
+	public Transaction beginTransaction() throws HibernateException {
+		return session.beginTransaction();
+	}
+
+	public void cancelQuery() throws HibernateException {
+		session.cancelQuery();
+	}
+
+	public void clear() {
+		session.clear();
+	}
+
+	public Connection close() throws HibernateException {
+		return session.close();
+	}
+
+	public Connection connection() throws HibernateException {
+		return session.connection();
+	}
+
+	public boolean contains(Object object) {
+		return session.contains( object );
+	}
+
+	public Criteria createCriteria(String entityName) {
+		return session.createCriteria( entityName );
+	}
+
+	public Criteria createCriteria(String entityName, String alias) {
+		return session.createCriteria( entityName, alias );
+	}
+
+	public Criteria createCriteria(Class persistentClass) {
+		return session.createCriteria( persistentClass );
+	}
+
+	public Criteria createCriteria(Class persistentClass, String alias) {
+		return session.createCriteria( persistentClass, alias );
+	}
+
+	public Query createFilter(Object collection, String queryString) throws HibernateException {
+		return session.createFilter( collection, queryString );
+	}
+
+	public Query createQuery(String queryString) throws HibernateException {
+		return session.createQuery( queryString );
+	}
+
+	public SQLQuery createSQLQuery(String queryString) throws HibernateException {
+		return session.createSQLQuery( queryString );
+	}
+
+	public void delete(String entityName, Object object) throws HibernateException {
+		session.delete( entityName, object );
+	}
+
+	public void delete(Object object) throws HibernateException {
+		session.delete( object );
+	}
+
+	public void disableFilter(String filterName) {
+		session.disableFilter( filterName );
+	}
+
+	public Connection disconnect() throws HibernateException {
+		return session.disconnect();
+	}
+
+	public Filter enableFilter(String filterName) {
+		return session.enableFilter( filterName );
+	}
+
+	public void evict(Object object) throws HibernateException {
+		session.evict( object );
+	}
+
+	public void flush() throws HibernateException {
+		session.flush();
+	}
+
+	public Object get(Class clazz, Serializable id) throws HibernateException {
+		return session.get( clazz, id );
+	}
+
+	public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
+		return session.get( clazz, id, lockMode );
+	}
+
+	public Object get(String entityName, Serializable id) throws HibernateException {
+		return session.get( entityName, id );
+	}
+
+	public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+		return session.get( entityName, id, lockMode );
+	}
+
+	public CacheMode getCacheMode() {
+		return session.getCacheMode();
+	}
+
+	public LockMode getCurrentLockMode(Object object) throws HibernateException {
+		return session.getCurrentLockMode( object );
+	}
+
+	public Filter getEnabledFilter(String filterName) {
+		return session.getEnabledFilter( filterName );
+	}
+
+	public EntityMode getEntityMode() {
+		return session.getEntityMode();
+	}
+
+	public String getEntityName(Object object) throws HibernateException {
+		return session.getEntityName( object );
+	}
+
+	public FlushMode getFlushMode() {
+		return session.getFlushMode();
+	}
+
+	public Serializable getIdentifier(Object object) throws HibernateException {
+		return session.getIdentifier( object );
+	}
+
+	public Query getNamedQuery(String queryName) throws HibernateException {
+		return session.getNamedQuery( queryName );
+	}
+
+	public org.hibernate.Session getSession(EntityMode entityMode) {
+		return session.getSession( entityMode );
+	}
+
+	public SessionFactory getSessionFactory() {
+		return session.getSessionFactory();
+	}
+
+	public SessionStatistics getStatistics() {
+		return session.getStatistics();
+	}
+
+	public Transaction getTransaction() {
+		return session.getTransaction();
+	}
+
+	public boolean isConnected() {
+		return session.isConnected();
+	}
+
+	public boolean isDirty() throws HibernateException {
+		return session.isDirty();
+	}
+
+	public boolean isOpen() {
+		return session.isOpen();
+	}
+
+	public Object load(String entityName, Serializable id) throws HibernateException {
+		return session.load( entityName, id );
+	}
+
+	public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+		return session.load( entityName, id, lockMode );
+	}
+
+	public void load(Object object, Serializable id) throws HibernateException {
+		session.load( object, id );
+	}
+
+	public Object load(Class theClass, Serializable id) throws HibernateException {
+		return session.load( theClass, id );
+	}
+
+	public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
+		return session.load( theClass, id, lockMode );
+	}
+
+	public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
+		session.lock( entityName, object, lockMode );
+	}
+
+	public void lock(Object object, LockMode lockMode) throws HibernateException {
+		session.lock( object, lockMode );
+	}
+
+	public Object merge(String entityName, Object object) throws HibernateException {
+		return session.merge( entityName, object );
+	}
+
+	public Object merge(Object object) throws HibernateException {
+		return session.merge( object );
+	}
+
+	public void persist(String entityName, Object object) throws HibernateException {
+		session.persist( entityName, object );
+	}
+
+	public void persist(Object object) throws HibernateException {
+		session.persist( object );
+	}
+
+	public void reconnect() throws HibernateException {
+		session.reconnect();
+	}
+
+	public void reconnect(Connection connection) throws HibernateException {
+		session.reconnect( connection );
+	}
+
+	public void refresh(Object object) throws HibernateException {
+		session.refresh( object );
+	}
+
+	public void refresh(Object object, LockMode lockMode) throws HibernateException {
+		session.refresh( object, lockMode );
+	}
+
+	public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
+		session.replicate( entityName, object, replicationMode );
+	}
+
+	public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
+		session.replicate( object, replicationMode );
+	}
+
+	public Serializable save(String entityName, Object object) throws HibernateException {
+		return session.save( entityName, object );
+	}
+
+	public Serializable save(Object object) throws HibernateException {
+		return session.save( object );
+	}
+
+	public void saveOrUpdate(String entityName, Object object) throws HibernateException {
+		session.saveOrUpdate( entityName, object );
+	}
+
+	public void saveOrUpdate(Object object) throws HibernateException {
+		session.saveOrUpdate( object );
+	}
+
+	public void setCacheMode(CacheMode cacheMode) {
+		session.setCacheMode( cacheMode );
+	}
+
+	public void setFlushMode(FlushMode flushMode) {
+		session.setFlushMode( flushMode );
+	}
+
+	public void setReadOnly(Object entity, boolean readOnly) {
+		session.setReadOnly( entity, readOnly );
+	}
+
+	public void update(String entityName, Object object) throws HibernateException {
+		session.update( entityName, object );
+	}
+
+	public void update(Object object) throws HibernateException {
+		session.update( object );
+	}
+}
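
A short sketch of how index() interacts with the transactional batching in processWork() above; the Book entity and bookId value are hypothetical:

import java.io.Serializable;

import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class ReindexSketch {
	public static void reindexBook(SessionFactory sf, Serializable bookId) {
		FullTextSession fts = Search.createFullTextSession( sf.openSession() );
		try {
			Transaction tx = fts.beginTransaction();
			// the entity must be managed by this session before it can be indexed
			Book book = (Book) fts.get( Book.class, bookId );
			fts.index( book ); // queued in a PostTransactionWorkQueueSynchronization
			tx.commit();       // the batched Lucene work runs after the commit
		}
		finally {
			fts.close();
		}
	}
}

Outside a transaction the same call is applied immediately, as the else branch of processWork() shows.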

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -34,7 +34,7 @@
 import org.hibernate.engine.query.ParameterMetadata;
 import org.hibernate.impl.AbstractQueryImpl;
 import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 import org.hibernate.search.util.ContextHelper;
 
 /**
@@ -71,7 +71,7 @@
 		//user stop using it
 		//scrollable is better in this area
 
-		FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );
+		FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
 		//find the directories
 		Searcher searcher = buildSearcher( listener );
 		try {
@@ -103,9 +103,9 @@
 		}
 	}
 
-	public ScrollableResults scroll() throws HibernateException {
+		public ScrollableResults scroll() throws HibernateException {
 		//keep the searcher open until the resultset is closed
-		FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );;
+		FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );;
 		//find the directories
 		Searcher searcher = buildSearcher( listener );
 		Hits hits;
@@ -134,7 +134,7 @@
 	}
 
 	public List list() throws HibernateException {
-		FullTextEventListener listener = ContextHelper.getLuceneEventListener( session );;
+		FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
 		//find the directories
 		Searcher searcher = buildSearcher( listener );
 		Hits hits;
@@ -210,7 +210,7 @@
 	}
 
 	//TODO classesAndSubclasses is changed by side effect, which is a mismatch with the Searcher return value; fix that.
-	private Searcher buildSearcher(FullTextEventListener listener) {
+	private Searcher buildSearcher(FullTextIndexEventListener listener) {
 		Map<Class, DocumentBuilder<Object>> builders = listener.getDocumentBuilders();
 		Set<Directory> directories = new HashSet<Directory>();
 		if ( classes == null || classes.length == 0 ) {

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/IteratorImpl.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -9,6 +9,7 @@
 /**
  * @author Emmanuel Bernard
  */
+//TODO load the next batch-size elements to benefit from batch-size 
 public class IteratorImpl implements Iterator {
 
 	private final List<EntityInfo> entityInfos;

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -18,7 +18,7 @@
 import org.hibernate.ScrollableResults;
 import org.hibernate.Session;
 import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 import org.hibernate.type.Type;
 
 /**
@@ -31,11 +31,11 @@
 	private final int max;
 	private int current;
 	private final Session session;
-	private final FullTextEventListener listener;
+	private final FullTextIndexEventListener listener;
 	private EntityInfo[] entityInfos;
 
 	public ScrollableResultsImpl(
-			Searcher searcher, Hits hits, int first, int max, Session session, FullTextEventListener listener
+			Searcher searcher, Hits hits, int first, int max, Session session, FullTextIndexEventListener listener
 	) {
 		this.searcher = searcher;
 		this.hits = hits;

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -4,20 +4,20 @@
 import org.hibernate.HibernateException;
 import org.hibernate.engine.SessionImplementor;
 import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 
 /**
  * @author Emmanuel Bernard
  */
 public abstract class ContextHelper {
 
-	public static FullTextEventListener getLuceneEventListener(SessionImplementor session) {
+	public static FullTextIndexEventListener getLuceneEventListener(SessionImplementor session) {
 		PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
-		FullTextEventListener listener = null;
+		FullTextIndexEventListener listener = null;
 	//FIXME this sucks since we mandate the event listener use
 		for ( PostInsertEventListener candidate : listeners ) {
-			if ( candidate instanceof FullTextEventListener ) {
-				listener = (FullTextEventListener) candidate;
+			if ( candidate instanceof FullTextIndexEventListener ) {
+				listener = (FullTextIndexEventListener) candidate;
 				break;
 			}
 		}

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/FSDirectoryTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -19,7 +19,7 @@
 import org.hibernate.event.PostUpdateEventListener;
 import org.hibernate.search.Environment;
 import org.hibernate.search.store.FSDirectoryProvider;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 
 /**
  * @author Gavin King
@@ -183,7 +183,7 @@
 		cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
 		cfg.setProperty( "hibernate.search.Clock.directory_provider", FSDirectoryProvider.class.getName() );
 		cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
-		FullTextEventListener del = new FullTextEventListener();
+		FullTextIndexEventListener del = new FullTextIndexEventListener();
 		cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
 		cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
 		cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -8,7 +8,7 @@
 import org.hibernate.event.PostUpdateEventListener;
 import org.hibernate.search.Environment;
 import org.hibernate.search.store.RAMDirectoryProvider;
-import org.hibernate.search.event.FullTextEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
 import org.hibernate.HibernateException;
 import org.hibernate.impl.SessionFactoryImpl;
 
@@ -25,13 +25,13 @@
 		return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
 	}
 
-	private FullTextEventListener getLuceneEventListener() {
+	private FullTextIndexEventListener getLuceneEventListener() {
         PostInsertEventListener[] listeners = ( (SessionFactoryImpl) getSessions() ).getEventListeners().getPostInsertEventListeners();
-        FullTextEventListener listener = null;
+        FullTextIndexEventListener listener = null;
         //FIXME this sucks since we mandate the event listener use
         for (PostInsertEventListener candidate : listeners) {
-            if (candidate instanceof FullTextEventListener ) {
-                listener = (FullTextEventListener) candidate;
+            if (candidate instanceof FullTextIndexEventListener ) {
+                listener = (FullTextIndexEventListener) candidate;
                 break;
             }
         }
@@ -42,7 +42,7 @@
 	protected void configure(org.hibernate.cfg.Configuration cfg) {
 		cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
 		cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
-		FullTextEventListener del = new FullTextEventListener();
+		FullTextIndexEventListener del = new FullTextIndexEventListener();
 		cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
 		cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
 		cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/BridgeTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -11,8 +11,9 @@
 import org.hibernate.cfg.Configuration;
 
 import org.hibernate.search.test.TestCase;
+import org.hibernate.search.Environment;
 import org.hibernate.search.FullTextSession;
-import org.hibernate.search.Environment;
+import org.hibernate.search.Search;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -41,17 +42,17 @@
         tx.commit();
 
         tx = s.beginTransaction();
-        FullTextSession session = new FullTextSession(s);
+        FullTextSession session = Search.createFullTextSession(s);
         QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
         Query query;
         List result;
 
         query = parser.parse("double2:[2.1 TO 2.1] AND float2:[2.1 TO 2.1] AND int2:[2 TO 2.1] AND long2:[2 TO 2.1]");
-        result = session.createLuceneQuery(query).list();
+        result = session.createFullTextQuery(query).list();
         assertEquals( "find primitives and do not fail on null", 1, result.size() );
 
         query = parser.parse("double1:[2.1 TO 2.1] OR float1:[2.1 TO 2.1] OR int1:[2 TO 2.1] OR long1:[2 TO 2.1]");
-        result = session.createLuceneQuery(query).list();
+        result = session.createFullTextQuery(query).list();
         assertEquals( "null elements should not be stored", 0, result.size() ); //the query is dumb because restrictive
 
         s.delete( s.get( Cloud.class, cloud.getId() ) );
@@ -71,17 +72,17 @@
         tx.commit();
 
         tx = s.beginTransaction();
-        FullTextSession session = new FullTextSession(s);
+        FullTextSession session = Search.createFullTextSession(s);
         QueryParser parser = new QueryParser("id", new SimpleAnalyzer() );
         Query query;
         List result;
 
         query = parser.parse("customFieldBridge:This AND customStringBridge:This");
-        result = session.createLuceneQuery(query).list();
+        result = session.createFullTextQuery(query).list();
         assertEquals( "Properties not mapped", 1, result.size() );
 
         query = parser.parse("customFieldBridge:by AND customStringBridge:is");
-        result = session.createLuceneQuery(query).list();
+        result = session.createFullTextQuery(query).list();
         assertEquals( "Custom types not taken into account", 0, result.size() );
 
         s.delete( s.get( Cloud.class, cloud.getId() ) );
@@ -113,7 +114,7 @@
         tx.commit();
 
         tx = s.beginTransaction();
-        FullTextSession session = new FullTextSession(s);
+        FullTextSession session = Search.createFullTextSession(s);
         QueryParser parser = new QueryParser("id", new StandardAnalyzer() );
         Query query;
         List result;
@@ -127,7 +128,7 @@
                 + " AND dateSecond:[20001214 TO 20001215034302]"
                 + " AND dateMillisecond:[20001214 TO 20001215034302005]"
         );
-        result = session.createLuceneQuery(query).list();
+        result = session.createFullTextQuery(query).list();
         assertEquals( "Date not found or not properly truncated", 1, result.size() );
 
         s.delete( s.get( Cloud.class, cloud.getId() ) );

Added: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/DateSplitBridge.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,43 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Store the date in 3 different fields (year, month, day)
+ * to ease range queries per year, month or day
+ * (e.g. get all the elements of December for the last 5 years)
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+	private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+	public void set(String name, Object value, Document document, Field.Store store, Field.Index index, Float boost) {
+		Date date = (Date) value;
+		Calendar cal = GregorianCalendar.getInstance( GMT );
+		cal.setTime( date );
+		int year = cal.get( Calendar.YEAR );
+		int month = cal.get( Calendar.MONTH ) + 1;
+		int day = cal.get( Calendar.DAY_OF_MONTH );
+		//set year
+		Field field = new Field( name + ".year", String.valueOf(year), store, index );
+		if ( boost != null ) field.setBoost( boost );
+		document.add( field );
+		//set month and pad it if needed
+		field = new Field( name + ".month", ( month < 10 ? "0" : "" ) + String.valueOf(month), store, index );
+		if ( boost != null ) field.setBoost( boost );
+		document.add( field );
+		//set day and pad it if needed
+		field = new Field( name + ".day", ( day < 10 ? "0" : "" ) + String.valueOf(day), store, index );
+		if ( boost != null ) field.setBoost( boost );
+		document.add( field );
+	}
+}
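
(Illustration only, not part of this commit: a minimal standalone sketch of what DateSplitBridge writes into a Lucene Document and how the split fields can then be range-queried. The field name "creation", the chosen Store/Index flags and the sketch class name are arbitrary assumptions; only the FieldBridge.set signature shown above and standard Lucene 2.x calls are used.)

import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;

import org.hibernate.search.test.bridge.DateSplitBridge;

public class DateSplitBridgeSketch {
	public static void main(String[] args) throws Exception {
		Calendar cal = GregorianCalendar.getInstance( TimeZone.getTimeZone( "GMT" ) );
		cal.set( 2006, Calendar.DECEMBER, 14 );

		//the bridge splits the Date into creation.year, creation.month and creation.day
		Document document = new Document();
		new DateSplitBridge().set( "creation", cal.getTime(), document,
				Field.Store.YES, Field.Index.UN_TOKENIZED, null );
		System.out.println( document.get( "creation.year" ) );  //2006
		System.out.println( document.get( "creation.month" ) ); //12
		System.out.println( document.get( "creation.day" ) );   //14

		//month-only range query, e.g. "all the December entries"
		Query query = new QueryParser( "id", new StandardAnalyzer() )
				.parse( "creation.month:[12 TO 12]" );
		System.out.println( query );
	}
}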

Added: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/bridge/PaddedIntegerBridge.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -0,0 +1,44 @@
+//$Id: $
+package org.hibernate.search.test.bridge;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.annotations.Parameter;
+
+/**
+ * Padding Integer bridge.
+ * All numbers are padded with leading zeros to match the configured number of digits (5 by default)
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+	public static final String PADDING_PROPERTY = "padding";
+
+	private int padding = 5; //default
+
+	public void setParameterValues(Map parameters) {
+		Object padding = parameters.get( PADDING_PROPERTY );
+		if (padding != null) this.padding = (Integer) padding;
+	}
+
+	public String objectToString(Object object) {
+		String rawInteger = ( (Integer) object ).toString();
+		if (rawInteger.length() > padding) throw new IllegalArgumentException( "Number too big to be padded: " + rawInteger );
+		StringBuilder paddedInteger = new StringBuilder( );
+		for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+			paddedInteger.append('0');
+		}
+		return paddedInteger.append( rawInteger ).toString();
+	}
+
+	public Object stringToObject(String stringValue) {
+		return new Integer(stringValue);
+	}
+}
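
(Illustration only, not part of this commit: a minimal sketch exercising the two contracts implemented above, ParameterizedBridge and TwoWayStringBridge, by calling the bridge directly. The padding value 10, the integer 25 and the sketch class name are arbitrary; in a real mapping the parameter would come from the @FieldBridge/@Parameter annotations imported above, but this sketch only uses the methods shown in the diff.)

import java.util.HashMap;
import java.util.Map;

import org.hibernate.search.test.bridge.PaddedIntegerBridge;

public class PaddedIntegerBridgeSketch {
	public static void main(String[] args) {
		PaddedIntegerBridge bridge = new PaddedIntegerBridge();

		//ParameterizedBridge contract: configure a 10 digit padding
		Map parameters = new HashMap();
		parameters.put( PaddedIntegerBridge.PADDING_PROPERTY, new Integer( 10 ) );
		bridge.setParameterValues( parameters );

		//25 becomes "0000000025"; lexicographic order now matches numeric order,
		//which is what makes Lucene range queries on the field behave as expected
		String padded = bridge.objectToString( new Integer( 25 ) );
		System.out.println( padded );

		//TwoWayStringBridge contract: back to the original value
		Integer restored = (Integer) bridge.stringToObject( padded );
		System.out.println( restored ); //25
	}
}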

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/fieldAccess/FieldAccessTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -5,6 +5,7 @@
 
 import org.hibernate.search.test.TestCase;
 import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
 import org.hibernate.Session;
 import org.hibernate.Transaction;
 import org.apache.lucene.queryParser.QueryParser;
@@ -24,10 +25,10 @@
 
         s.clear();
 
-        FullTextSession session = new FullTextSession(s);
+        FullTextSession session = Search.createFullTextSession(s);
         tx = session.beginTransaction();
         QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
-        List result = session.createLuceneQuery( p.parse( "Abstract:Hibernate" ) ).list();
+        List result = session.createFullTextQuery( p.parse( "Abstract:Hibernate" ) ).list();
         assertEquals( "Query by field", 1, result.size() );
         s.delete( result.get( 0 ) );
         tx.commit();
@@ -48,10 +49,10 @@
 
         s.clear();
 
-        FullTextSession session = new FullTextSession(s);
+        FullTextSession session = Search.createFullTextSession(s);
         tx = session.beginTransaction();
         QueryParser p = new QueryParser("id", new StandardAnalyzer( ) );
-        List result = session.createLuceneQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
+        List result = session.createFullTextQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
         assertEquals( "Query by field", 2, result.size() );
         assertEquals( "@Boost fails", "Hibernate in Action", ( (Document) result.get( 0 ) ).getTitle() );
         s.delete( result.get( 0 ) );

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/inheritance/InheritanceTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -3,6 +3,7 @@
 
 import org.hibernate.search.test.TestCase;
 import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
 import org.hibernate.Transaction;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.analysis.StopAnalyzer;
@@ -16,7 +17,7 @@
 public class InheritanceTest extends TestCase {
 
 	public void testInheritance() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Animal a = new Animal();
         a.setName("Shark Jr");
@@ -34,19 +35,19 @@
 		org.hibernate.Query hibQuery;
 
         query = parser.parse( "Elephant" );
-		hibQuery = s.createLuceneQuery( query, Mammal.class );
+		hibQuery = s.createFullTextQuery( query, Mammal.class );
 		List result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query subclass by superclass attribute", 1, result.size() );
 
         query = parser.parse( "mammalNbr:[2 TO 2]" );
-		hibQuery = s.createLuceneQuery( query, Animal.class, Mammal.class );
+		hibQuery = s.createFullTextQuery( query, Animal.class, Mammal.class );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query subclass by subclass attribute", 1, result.size() );
 
         query = parser.parse( "Jr" );
-		hibQuery = s.createLuceneQuery( query, Animal.class );
+		hibQuery = s.createFullTextQuery( query, Animal.class );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query filtering on superclass return mapped subclasses", 2, result.size() );

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -6,6 +6,7 @@
 
 import org.hibernate.search.test.TestCase;
 import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
 import org.hibernate.Transaction;
 import org.hibernate.Hibernate;
 import org.hibernate.ScrollableResults;
@@ -20,7 +21,7 @@
 public class LuceneQueryTest extends TestCase {
 
 	public void testList() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Clock clock = new Clock(1, "Seiko");
 		s.save( clock );
@@ -36,19 +37,19 @@
 		QueryParser parser = new QueryParser("title", new StopAnalyzer() );
 
 		Query query = parser.parse( "summary:noword" );
-		org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		List result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( 0, result.size() );
 
 		query = parser.parse( "summary:Festina Or brand:Seiko" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query with explicit class filter", 2, result.size() );
 
         query = parser.parse( "summary:Festina Or brand:Seiko" );
-		hibQuery = s.createLuceneQuery( query );
+		hibQuery = s.createFullTextQuery( query );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query with no class filter", 2, result.size() );
@@ -62,7 +63,7 @@
 	}
 
 	public void testFirstMax() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Clock clock = new Clock(1, "Seiko");
 		s.save( clock );
@@ -78,7 +79,7 @@
 		QueryParser parser = new QueryParser("title", new StopAnalyzer() );
 
 		Query query = parser.parse( "summary:Festina Or brand:Seiko" );
-		org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		hibQuery.setFirstResult( 1 );
 		List result = hibQuery.list();
 		assertNotNull( result );
@@ -108,7 +109,7 @@
 	}
 
 	public void testIterator() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Clock clock = new Clock(1, "Seiko");
 		s.save( clock );
@@ -124,13 +125,13 @@
 		QueryParser parser = new QueryParser("title", new StopAnalyzer() );
 
 		Query query = parser.parse( "summary:noword" );
-		org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		Iterator result = hibQuery.iterate();
 		assertNotNull( result );
 		assertFalse( result.hasNext() );
 
 		query = parser.parse( "summary:Festina Or brand:Seiko" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		result = hibQuery.iterate();
 		assertNotNull( result );
 		int index = 0;
@@ -145,7 +146,7 @@
 	}
 
 	public void testScrollableResultSet() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Clock clock = new Clock(1, "Seiko");
 		s.save( clock );
@@ -161,7 +162,7 @@
 		QueryParser parser = new QueryParser("title", new StopAnalyzer() );
 
 		Query query = parser.parse( "summary:noword" );
-		org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		ScrollableResults result = hibQuery.scroll();
 		assertNotNull( result );
 		assertEquals(-1, result.getRowNumber() );
@@ -169,7 +170,7 @@
 		result.close();
 
 		query = parser.parse( "summary:Festina Or brand:Seiko" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		result = hibQuery.scroll();
 		assertEquals(0, result.getRowNumber() );
 		result.beforeFirst();
@@ -188,7 +189,7 @@
 	}
 
 	public void testMultipleEntityPerIndex() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		Clock clock = new Clock(1, "Seiko");
 		s.save( clock );
@@ -202,20 +203,20 @@
 		QueryParser parser = new QueryParser("title", new StopAnalyzer() );
 
 		Query query = parser.parse( "summary:Festina" );
-		org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		List result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query with explicit class filter", 1, result.size() );
 		
 		query = parser.parse( "summary:Festina" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		Iterator it = hibQuery.iterate();
 		assertTrue( it.hasNext() );
 		assertNotNull( it.next() );
 		assertFalse( it.hasNext() );
 
 		query = parser.parse( "summary:Festina" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		ScrollableResults sr = hibQuery.scroll();
 		assertTrue( sr.first() );
 		assertNotNull( sr.get() );
@@ -223,14 +224,14 @@
 		sr.close();
 
 		query = parser.parse( "summary:Festina OR brand:seiko" );
-		hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+		hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
 		hibQuery.setMaxResults( 2 );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query with explicit class filter and limit", 2, result.size() );
 
 		query = parser.parse( "summary:Festina" );
-		hibQuery = s.createLuceneQuery( query );
+		hibQuery = s.createFullTextQuery( query );
 		result = hibQuery.list();
 		assertNotNull( result );
 		assertEquals( "Query with no class filter", 2, result.size() );

Modified: branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java
===================================================================
--- branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java	2006-11-22 12:54:10 UTC (rev 10864)
+++ branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/search/test/session/MassIndexTest.java	2006-11-23 22:30:01 UTC (rev 10865)
@@ -4,7 +4,9 @@
 import java.util.List;
 
 import org.hibernate.search.test.TestCase;
+import org.hibernate.search.impl.FullTextSessionImpl;
 import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
 import org.hibernate.Transaction;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.analysis.StopAnalyzer;
@@ -15,7 +17,7 @@
 public class MassIndexTest extends TestCase {
 
 	public void testTransactional() throws Exception {
-		FullTextSession s = new FullTextSession( openSession() );
+		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();
 		int loop = 4;
 		for (int i = 0 ; i < loop; i++) {
@@ -27,16 +29,16 @@
 		tx.commit();
 		s.close();
 
-		s = new FullTextSession( openSession() );
+		s = new FullTextSessionImpl( openSession() );
 		s.getTransaction().begin();
 		s.connection().createStatement().executeUpdate( "update Email set body='Meet the guys who write the software'");
 		s.getTransaction().commit();
 		s.close();
 
-		s = new FullTextSession( openSession() );
+		s = new FullTextSessionImpl( openSession() );
 		tx = s.beginTransaction();
 		QueryParser parser = new QueryParser("id", new StopAnalyzer() );
-		List result = s.createLuceneQuery( parser.parse( "body:write" ) ).list();
+		List result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
 		assertEquals( 0, result.size() );
 		result = s.createCriteria( Email.class ).list();
 		for (int i = 0 ; i < loop/2 ; i++)
@@ -49,9 +51,9 @@
 		tx.commit(); //do the process
 		s.close();
 
-		s = new FullTextSession( openSession() );
+		s = new FullTextSessionImpl( openSession() );
 		tx = s.beginTransaction();
-		result = s.createLuceneQuery( parser.parse( "body:write" ) ).list();
+		result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
 		assertEquals( loop, result.size() );
 		for (Object o : result) s.delete( o );
 		tx.commit();



