[hibernate-commits] Hibernate SVN: r12844 - in trunk/HibernateExt/search/src: java/org/hibernate/search/backend and 8 other directories.

hibernate-commits at lists.jboss.org hibernate-commits at lists.jboss.org
Sun Jul 29 10:56:47 EDT 2007


Author: epbernard
Date: 2007-07-29 10:56:47 -0400 (Sun, 29 Jul 2007)
New Revision: 12844

Added:
   trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderShardingStrategy.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/store/IdHashShardingStrategy.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/store/NotShardedStrategy.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/Animal.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/ShardsTest.java
Modified:
   trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/AddLuceneWork.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/DeleteLuceneWork.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/OptimizeLuceneWork.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
Log:
HSEARCH-86 introduce Index Sharding, including sharding strategy

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -19,7 +19,7 @@
 	 * Provide access to the DirectoryProvider (hence the Lucene Directory)
 	 * for a given entity
 	 */
-	DirectoryProvider getDirectoryProvider(Class entity);
+	DirectoryProvider[] getDirectoryProviders(Class entity);
 
 	/**
 	 * Optimize all indexes

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/AddLuceneWork.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/AddLuceneWork.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/AddLuceneWork.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -9,7 +9,7 @@
  * @author Emmanuel Bernard
  */
 public class AddLuceneWork extends LuceneWork {
-	public AddLuceneWork(Serializable id, Class entity, Document document) {
-		super( id, entity, document );
+	public AddLuceneWork(Serializable id, String idInString, Class entity, Document document) {
+		super( id, idInString, entity, document );
 	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/DeleteLuceneWork.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/DeleteLuceneWork.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/DeleteLuceneWork.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -7,7 +7,7 @@
  * @author Emmanuel Bernard
  */
 public class DeleteLuceneWork extends LuceneWork {
-	public DeleteLuceneWork(Serializable id, Class entity) {
-		super( id, entity );
+	public DeleteLuceneWork(Serializable id, String idInString, Class entity) {
+		super( id, idInString, entity );
 	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -21,13 +21,15 @@
 	 * Flag indicating if this lucene work has to be indexed in batch mode.
 	 */
 	private boolean batch = false;
+	private String idInString;
 
-	public LuceneWork(Serializable id, Class entity) {
-		this( id, entity, null );
+	public LuceneWork(Serializable id, String idInString, Class entity) {
+		this( id, idInString, entity, null );
 	}
 
-	public LuceneWork(Serializable id, Class entity, Document document) {
+	public LuceneWork(Serializable id, String idInString, Class entity, Document document) {
 		this.id = id;
+		this.idInString = idInString;
 		this.entityClass = entity;
 		this.document = document;
 	}
@@ -51,4 +53,8 @@
 	public Serializable getId() {
 		return id;
 	}
+
+	public String getIdInString() {
+		return idInString;
+	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/OptimizeLuceneWork.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/OptimizeLuceneWork.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/OptimizeLuceneWork.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -9,6 +9,6 @@
  */
 public class OptimizeLuceneWork extends LuceneWork {
 	public OptimizeLuceneWork(Class entity) {
-		super( null, entity );
+		super( null, null, entity );
 	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -2,10 +2,13 @@
 package org.hibernate.search.backend;
 
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Set;
+import java.util.HashSet;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
@@ -14,6 +17,7 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.document.Document;
 import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.engine.SearchFactoryImplementor;
 import org.hibernate.search.SearchException;
@@ -65,18 +69,13 @@
 	}
 
 	/**
-	 * Retrieve an IndexWriter for a given entity
-	 *
-	 * @param entity entity processed
-	 * @param modificationOperation true if the modification is expected to be an idnex state change
-	 * @return appropriate indexWriter
+	 * Retrieve a read-write IndexReader.
+	 * For a given DirectoryProvider, an IndexReader must be used before an IndexWriter
 	 */
-	public IndexReader getIndexReader(Class entity, boolean modificationOperation) {
-		//TODO NPEs
-		DirectoryProvider provider = searchFactoryImplementor.getDirectoryProvider( entity );
+	public IndexReader getIndexReader(DirectoryProvider provider, Class entity) {
 		//one cannot access a reader for update after a writer has been accessed
 		if ( writers.containsKey( provider ) )
-			throw new AssertionFailure("Tries to read for update a index while a writer is accessed" + entity);
+			throw new AssertionFailure("Tries to read for update an index while a writer is accessed" + entity);
 		IndexReader reader = readers.get( provider );
 		if ( reader != null ) return reader;
 		lockProvider( provider );
@@ -91,24 +90,16 @@
 		return reader;
 	}
 
-	/**
-	 * Retrieve an IndexWriter for a given entity
-	 *
-	 * @param entity entity processed
-	 * @param modificationOperation true if the modification is expected to be an idnex state change
-	 * @return appropriate indexWriter
-	 */
-	public IndexWriter getIndexWriter(Class entity, boolean modificationOperation) {
-		DirectoryProvider provider = searchFactoryImplementor.getDirectoryProvider( entity );
-		return getIndexWriter( provider, entity, modificationOperation );
-	}
-
 	//for optimization
 	public IndexWriter getIndexWriter(DirectoryProvider provider) {
 		return getIndexWriter( provider, null, false );
 	}
 
-	private IndexWriter getIndexWriter(DirectoryProvider provider, Class entity, boolean modificationOperation) {
+	/**
+	 * Retrieve a read-write IndexWriter.
+	 * For a given DirectoryProvider, an IndexReader must be used before an IndexWriter
+	 */
+	public IndexWriter getIndexWriter(DirectoryProvider provider, Class entity, boolean modificationOperation) {
 		//one has to close a reader for update before a writer is accessed
 		IndexReader reader = readers.get( provider );
 		if ( reader != null ) {
@@ -123,7 +114,7 @@
 		IndexWriter writer = writers.get( provider );
 		if ( writer != null ) return writer;
 		lockProvider( provider );
-		dpStatistics.get(provider).operations++;
+		if (modificationOperation) dpStatistics.get(provider).operations++;
 		try {
 			Analyzer analyzer = entity != null ?
 					searchFactoryImplementor.getDocumentBuilders().get( entity ).getAnalyzer() :
@@ -231,7 +222,15 @@
 	}
 
 	public void optimize(Class entity) {
-		DirectoryProvider provider = searchFactoryImplementor.getDirectoryProvider( entity );
+		DocumentBuilder documentBuilders = searchFactoryImplementor.getDocumentBuilders().get( entity );
+		DirectoryProvider[] providers = documentBuilders.getDirectoryProviderSelectionStrategy()
+				.getDirectoryProvidersForOptimization( entity );
+		for (DirectoryProvider provider: providers) {
+			optimize( provider );
+		}
+	}
+
+	private void optimize(DirectoryProvider provider) {
 		OptimizerStrategy optimizerStrategy = searchFactoryImplementor.getOptimizerStrategy( provider );
 		dpStatistics.get(provider).optimizationForced = true;
 		optimizerStrategy.optimizationForced();

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -4,6 +4,7 @@
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -11,8 +12,12 @@
 import org.hibernate.search.backend.LuceneWork;
 import org.hibernate.search.backend.Workspace;
 import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
 import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderShardingStrategy;
+import org.hibernate.annotations.common.AssertionFailure;
 
 /**
  * Apply the operations to Lucene directories avoiding deadlocks.
@@ -41,9 +46,45 @@
 		workspace = new Workspace( searchFactoryImplementor );
 		worker = new LuceneWorker( workspace );
 		try {
-			deadlockFreeQueue(queue, workspace, searchFactoryImplementor);		
+			List<LuceneWorker.WorkWithPayload> queueWithFlatDPs = new ArrayList<LuceneWorker.WorkWithPayload>( queue.size()*2 );
+			for ( LuceneWork work : queue ) {
+				DocumentBuilder documentBuilder = searchFactoryImplementor.getDocumentBuilders().get( work.getEntityClass() );
+				DirectoryProviderShardingStrategy shardingStrategy = documentBuilder.getDirectoryProviderSelectionStrategy();
+
+				if ( AddLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
+					DirectoryProvider provider = shardingStrategy.getDirectoryProviderForAddition(
+							work.getEntityClass(),
+							work.getId(),
+							work.getIdInString(),
+							work.getDocument()
+					);
+					queueWithFlatDPs.add( new LuceneWorker.WorkWithPayload(work, provider) );
+				}
+				else if ( DeleteLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
+					DirectoryProvider[] providers = shardingStrategy.getDirectoryProvidersForDeletion(
+							work.getEntityClass(),
+							work.getId(),
+							work.getIdInString()
+					);
+					for (DirectoryProvider provider : providers) {
+						queueWithFlatDPs.add( new LuceneWorker.WorkWithPayload(work, provider) );
+					}
+				}
+				else if ( OptimizeLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
+					DirectoryProvider[] providers = shardingStrategy.getDirectoryProvidersForOptimization(
+							work.getEntityClass()
+					);
+					for (DirectoryProvider provider : providers) {
+						queueWithFlatDPs.add( new LuceneWorker.WorkWithPayload(work, provider) );
+					}
+				}
+				else {
+					throw new AssertionFailure( "Unknown work type: " + work.getClass() );
+				}
+			}
+			deadlockFreeQueue(queueWithFlatDPs, searchFactoryImplementor);
 			checkForBatchIndexing(workspace);		
-			for ( LuceneWork luceneWork : queue ) {
+			for ( LuceneWorker.WorkWithPayload luceneWork : queueWithFlatDPs ) {
 				worker.performWork( luceneWork );
 			}
 		}
@@ -57,8 +98,9 @@
 		for ( LuceneWork luceneWork : queue ) {
 			// if there is at least a single batch index job we put the work space into batch indexing mode.
 			if(luceneWork.isBatch()){
-				log.debug("Setting batch indexing mode.");
+				log.trace("Setting batch indexing mode.");
 				workspace.setBatch(true);
+				break;
 			}
 		}
 	}
@@ -68,9 +110,9 @@
 	 * dead lock between concurrent threads or processes
 	 * To achieve that, the work will be done per directory provider
 	 */
-	private void deadlockFreeQueue(List<LuceneWork> queue, final Workspace workspace, final SearchFactoryImplementor searchFactoryImplementor) {
-		Collections.sort( queue, new Comparator<LuceneWork>() {
-			public int compare(LuceneWork o1, LuceneWork o2) {
+	private void deadlockFreeQueue(List<LuceneWorker.WorkWithPayload> queue, final SearchFactoryImplementor searchFactoryImplementor) {
+		Collections.sort( queue, new Comparator<LuceneWorker.WorkWithPayload>() {
+			public int compare(LuceneWorker.WorkWithPayload o1, LuceneWorker.WorkWithPayload o2) {
 				long h1 = getWorkHashCode( o1, searchFactoryImplementor );
 				long h2 = getWorkHashCode( o2, searchFactoryImplementor );
 				return h1 < h2 ?
@@ -82,14 +124,13 @@
 		} );
 	}
 
-	private long getWorkHashCode(LuceneWork luceneWork, SearchFactoryImplementor searchFactoryImplementor) {
-		Class entity = luceneWork.getEntityClass();
-		DirectoryProvider provider = searchFactoryImplementor.getDirectoryProvider( entity );
+	private long getWorkHashCode(LuceneWorker.WorkWithPayload luceneWork, SearchFactoryImplementor searchFactoryImplementor) {
+		DirectoryProvider provider = luceneWork.getProvider();
 		int h = provider.getClass().hashCode();
 		h = 31 * h + provider.hashCode();
 		long extendedHash = h; //to be sure extendedHash + 1 < extendedHash + 2 is always true
-		if ( luceneWork instanceof AddLuceneWork ) extendedHash+=1; //addwork after deleteWork
-		if ( luceneWork instanceof OptimizeLuceneWork ) extendedHash+=2; //optimize after everything
+		if ( luceneWork.getWork() instanceof AddLuceneWork ) extendedHash+=1; //addwork after deleteWork
+		if ( luceneWork.getWork() instanceof OptimizeLuceneWork ) extendedHash+=2; //optimize after everything
 		return extendedHash;
 	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -13,6 +13,7 @@
 import org.apache.lucene.index.TermDocs;
 import org.hibernate.annotations.common.AssertionFailure;
 import org.hibernate.search.SearchException;
+import org.hibernate.search.store.DirectoryProvider;
 import org.hibernate.search.backend.AddLuceneWork;
 import org.hibernate.search.backend.DeleteLuceneWork;
 import org.hibernate.search.backend.LuceneWork;
@@ -34,32 +35,33 @@
 		this.workspace = workspace;
 	}
 
-	public void performWork(LuceneWork luceneWork) {
-		if ( AddLuceneWork.class.isAssignableFrom( luceneWork.getClass() ) ) {
-			performWork( (AddLuceneWork) luceneWork );
+	public void performWork(WorkWithPayload luceneWork) {
+		Class workClass = luceneWork.getWork().getClass();
+		if ( AddLuceneWork.class.isAssignableFrom( workClass ) ) {
+			performWork( (AddLuceneWork) luceneWork.getWork(), luceneWork.getProvider() );
 		}
-		else if ( DeleteLuceneWork.class.isAssignableFrom( luceneWork.getClass() ) ) {
-			performWork( (DeleteLuceneWork) luceneWork );
+		else if ( DeleteLuceneWork.class.isAssignableFrom( workClass ) ) {
+			performWork( (DeleteLuceneWork) luceneWork.getWork(), luceneWork.getProvider() );
 		}
-		else if ( OptimizeLuceneWork.class.isAssignableFrom( luceneWork.getClass() ) ) {
-			performWork( (OptimizeLuceneWork) luceneWork );
+		else if ( OptimizeLuceneWork.class.isAssignableFrom( workClass ) ) {
+			performWork( (OptimizeLuceneWork) luceneWork.getWork(), luceneWork.getProvider() );
 		}
 		else {
-			throw new AssertionFailure( "Unknown work type: " + luceneWork.getClass() );
+			throw new AssertionFailure( "Unknown work type: " + workClass );
 		}
 	}
 
-	public void performWork(AddLuceneWork work) {
+	public void performWork(AddLuceneWork work, DirectoryProvider provider) {
 		Class entity = work.getEntityClass();
 		Serializable id = work.getId();
 		Document document = work.getDocument();
-		add( entity, id, document );
+		add( entity, id, document, provider );
 	}
 
-	private void add(Class entity, Serializable id, Document document) {
+	private void add(Class entity, Serializable id, Document document, DirectoryProvider provider) {
 		if ( log.isTraceEnabled() )
 			log.trace( "add to Lucene index: " + entity + "#" + id + ": " + document );
-		IndexWriter writer = workspace.getIndexWriter( entity, true );
+		IndexWriter writer = workspace.getIndexWriter( provider, entity, true );
 		try {
 			writer.addDocument( document );
 		}
@@ -68,22 +70,22 @@
 		}
 	}
 
-	public void performWork(DeleteLuceneWork work) {
+	public void performWork(DeleteLuceneWork work, DirectoryProvider provider) {
 		Class entity = work.getEntityClass();
 		Serializable id = work.getId();
-		remove( entity, id );
+		remove( entity, id, provider );
 	}
 
-	private void remove(Class entity, Serializable id) {
+	private void remove(Class entity, Serializable id, DirectoryProvider provider) {
 		/**
-		 * even with Lucene 2.1, use of indexWriter to delte is not an option
+		 * even with Lucene 2.1, use of indexWriter to delete is not an option
 	 * We can only delete by term, and the index doesn't have a term that
 	 * uniquely identifies the entry. See logic below
 		 */
 		log.trace( "remove from Lucene index: " + entity + "#" + id );
 		DocumentBuilder builder = workspace.getDocumentBuilder( entity );
 		Term term = builder.getTerm( id );
-		IndexReader reader = workspace.getIndexReader( entity, true );
+		IndexReader reader = workspace.getIndexReader( provider, entity );
 		TermDocs termDocs = null;
 		try {
 			//TODO is there a faster way?
@@ -110,15 +112,14 @@
 				log.warn( "Unable to close termDocs properly", e);
 			}
 		}
+
 	}
 
-	public void performWork(OptimizeLuceneWork work) {
+	public void performWork(OptimizeLuceneWork work, DirectoryProvider provider) {
 		Class entity = work.getEntityClass();
 		if ( log.isTraceEnabled() )
 			log.trace( "optimize Lucene index: " + entity );
-		//TODO get a batchIndexWriter of some kind that deals with different merge factors and all
-		//TODO this one should not compete with a regular IndexWriter (ie exception from the workspace)
-		IndexWriter writer = workspace.getIndexWriter( entity, false );
+		IndexWriter writer = workspace.getIndexWriter( provider, entity, false );
 		try {
 			writer.optimize();
 			workspace.optimize(entity);
@@ -127,4 +128,24 @@
 			throw new SearchException( "Unable to optimize Lucene index: " + entity, e );
 		}
 	}
+
+	public static class WorkWithPayload {
+		private LuceneWork work;
+		private DirectoryProvider provider;
+
+
+		public WorkWithPayload(LuceneWork work, DirectoryProvider provider) {
+			this.work = work;
+			this.provider = provider;
+		}
+
+
+		public LuceneWork getWork() {
+			return work;
+		}
+
+		public DirectoryProvider getProvider() {
+			return provider;
+		}
+	}
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -44,6 +44,7 @@
 import org.hibernate.search.bridge.FieldBridge;
 import org.hibernate.search.bridge.TwoWayFieldBridge;
 import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderShardingStrategy;
 import org.hibernate.search.util.BinderHelper;
 import org.hibernate.search.util.ScopedAnalyzer;
 
@@ -61,7 +62,8 @@
 
 	private final PropertiesMetadata rootPropertiesMetadata;
 	private final XClass beanClass;
-	private final DirectoryProvider directoryProvider;
+	private final DirectoryProvider[] directoryProviders;
+	private final DirectoryProviderShardingStrategy shardingStrategy;
 	private String idKeywordName;
 	private XMember idGetter;
 	private Float idBoost;
@@ -74,11 +76,12 @@
 	private ScopedAnalyzer analyzer;
 
 
-	public DocumentBuilder(XClass clazz, Analyzer defaultAnalyzer, DirectoryProvider directory,
-						   ReflectionManager reflectionManager) {
+	public DocumentBuilder(XClass clazz, Analyzer defaultAnalyzer, DirectoryProvider[] directoryProviders,
+						   DirectoryProviderShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
 		this.analyzer = new ScopedAnalyzer();
 		this.beanClass = clazz;
-		this.directoryProvider = directory;
+		this.directoryProviders = directoryProviders;
+		this.shardingStrategy = shardingStrategy;
 		//FIXME get rid of it when boost is stored?
 		this.reflectionManager = reflectionManager;
 
@@ -387,13 +390,14 @@
 
 		}
 		boolean searchForContainers = false;
+		String idInString = idBridge.objectToString( id );
 		if ( workType == WorkType.ADD ) {
 			Document doc = getDocument( entity, id );
-			queue.add( new AddLuceneWork( id, entityClass, doc ) );
+			queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
 			searchForContainers = true;
 		}
 		else if ( workType == WorkType.DELETE ) {
-			queue.add( new DeleteLuceneWork( id, entityClass ) );
+			queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
 		}
 		else if ( workType == WorkType.UPDATE ) {
 			Document doc = getDocument( entity, id );
@@ -404,14 +408,14 @@
 			 * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
 			 * double file opening.
 			 */
-			queue.add( new DeleteLuceneWork( id, entityClass ) );
-			queue.add( new AddLuceneWork( id, entityClass, doc ) );
+			queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+			queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
 			searchForContainers = true;
 		}
 		else if ( workType == WorkType.INDEX ) {
 			Document doc = getDocument( entity, id );
-			queue.add( new DeleteLuceneWork( id, entityClass ) );
-			LuceneWork work = new AddLuceneWork( id, entityClass, doc );
+			queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+			LuceneWork work = new AddLuceneWork( id, idInString, entityClass, doc );
 			work.setBatch( true );
 			queue.add( work );
 			searchForContainers = true;
@@ -578,10 +582,14 @@
 		return new Term( idKeywordName, idBridge.objectToString( id ) );
 	}
 
-	public DirectoryProvider getDirectoryProvider() {
-		return directoryProvider;
+	public DirectoryProvider[] getDirectoryProviders() {
+		return directoryProviders;
 	}
 
+	public DirectoryProviderShardingStrategy getDirectoryProviderSelectionStrategy() {
+		return shardingStrategy;
+	}
+
 	public Analyzer getAnalyzer() {
 		return analyzer;
 	}

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -226,9 +226,9 @@
 		return reflectionManager;
 	}
 
-	public DirectoryProvider getDirectoryProvider(Class entity) {
+	public DirectoryProvider[] getDirectoryProviders(Class entity) {
 		DocumentBuilder<Object> documentBuilder = getDocumentBuilders().get( entity );
-		return documentBuilder == null ? null : documentBuilder.getDirectoryProvider();
+		return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
 	}
 
 	public void optimize() {
@@ -257,13 +257,11 @@
 				XClass mappedXClass = reflectionManager.toXClass(mappedClass);
 				if ( mappedXClass != null) {
 					if ( mappedXClass.isAnnotationPresent( Indexed.class ) ) {
-						DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg, this );
-						//TODO move that into DirectoryProviderFactory
-						if ( !lockableDirectoryProviders.containsKey( provider ) ) {
-							lockableDirectoryProviders.put( provider, new ReentrantLock() );
-						}
+						DirectoryProviderFactory.DirectoryProviders providers = factory.createDirectoryProviders( mappedXClass, cfg, this );
+
 						final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
-								mappedXClass, analyzer, provider, reflectionManager
+								mappedXClass, analyzer, providers.getProviders(), providers.getSelectionStrategy(),
+								reflectionManager
 						);
 
 						documentBuilders.put( mappedClass, documentBuilder );

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -407,9 +407,12 @@
 		if ( classes == null || classes.length == 0 ) {
 			//no class means all classes
 			for (DocumentBuilder builder : builders.values()) {
-				final DirectoryProvider directoryProvider = builder.getDirectoryProvider();
-				if ( !directories.contains( directoryProvider ) ) {
-					directories.add( directoryProvider );
+				final DirectoryProvider[] directoryProviders =
+						builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForQuery();
+				for (DirectoryProvider provider : directoryProviders) {
+					if ( !directories.contains( provider ) ) {
+						directories.add( provider );
+					}
 				}
 			}
 			classesAndSubclasses = null;
@@ -426,9 +429,12 @@
 				//TODO should we rather choose a polymorphic path and allow non mapped entities
 				if ( builder == null )
 					throw new HibernateException( "Not a mapped entity (don't forget to add @Indexed): " + clazz );
-				final DirectoryProvider directoryProvider = builder.getDirectoryProvider();
-				if ( !directories.contains( directoryProvider ) ) {
-					directories.add( directoryProvider );
+				final DirectoryProvider[] directoryProviders = 
+						builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForQuery();
+				for (DirectoryProvider provider : directoryProviders) {
+					if ( !directories.contains( provider ) ) {
+						directories.add( provider );
+					}
 				}
 			}
 			classesAndSubclasses = involvedClasses;

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderFactory.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderFactory.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -5,6 +5,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.hibernate.HibernateException;
 import org.hibernate.cfg.Configuration;
@@ -15,6 +17,7 @@
 import org.hibernate.search.store.optimization.OptimizerStrategy;
 import org.hibernate.search.store.optimization.IncrementalOptimizerStrategy;
 import org.hibernate.search.store.optimization.NoOpOptimizerStrategy;
+import org.hibernate.search.SearchException;
 import org.hibernate.mapping.PersistentClass;
 import org.hibernate.annotations.common.reflection.ReflectionManager;
 import org.hibernate.annotations.common.reflection.XClass;
@@ -47,7 +50,7 @@
 	private static String LUCENE_DEFAULT = LUCENE_PREFIX + "default.";
 	private static String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
 	
-	// Lucene index performance paramters
+	// Lucene index performance parameters
 	private static final String MERGE_FACTOR = "merge_factor";
 	private static final String MAX_MERGE_DOCS = "max_merge_docs";
 	private static final String MAX_BUFFERED_DOCS = "max_buffered_docs";
@@ -55,13 +58,66 @@
 	private static final String BATCH_MAX_MERGE_DOCS = "max_merge_docs.batch";
 	private static final String BATCH_MAX_BUFFERED_DOCS = "max_buffered_docs.batch";
 
-	//TODO for the public?
-	public DirectoryProvider<?> createDirectoryProvider(XClass entity, Configuration cfg, SearchFactoryImplementor searchFactoryImplementor) {
+	private static final String SHARDING_STRATEGY = "sharding_strategy";
+	private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
+
+
+	public DirectoryProviders createDirectoryProviders(XClass entity, Configuration cfg, SearchFactoryImplementor searchFactoryImplementor) {
 		//get properties
 		String directoryProviderName = getDirectoryProviderName( entity, cfg );
-		Properties indexProps = getDirectoryProperties( cfg, directoryProviderName );
+		Properties[] indexProps = getDirectoryProperties( cfg, directoryProviderName );
 
-		//set up the directory
+		//set up the directories
+		int nbrOfProviders = indexProps.length;
+		DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
+		for (int index = 0 ; index < nbrOfProviders ; index++) {
+			providers[index] = createDirectoryProvider(directoryProviderName + "_" + index,indexProps[index], searchFactoryImplementor);
+		}
+
+		//define sharding strategy
+		DirectoryProviderShardingStrategy shardingStrategy;
+		Properties shardingProperties = new Properties();
+		for (Map.Entry entry : indexProps[0].entrySet()) {
+			if ( ( (String) entry.getKey() ).startsWith( SHARDING_STRATEGY ) ) {
+				shardingProperties.put( entry.getKey(), entry.getValue() );
+			}
+		}
+		String shardingStrategyName = shardingProperties.getProperty( SHARDING_STRATEGY );
+		if ( shardingStrategyName == null) {
+			if ( indexProps.length == 1 ) {
+				shardingStrategy = new NotShardedStrategy();
+			}
+			else {
+				shardingStrategy = new IdHashShardingStrategy();
+			}
+		}
+		else {
+			try {
+				Class shardigStrategyClass = ReflectHelper.classForName( shardingStrategyName, this.getClass() );
+				shardingStrategy = (DirectoryProviderShardingStrategy) shardigStrategyClass.newInstance();
+			}
+			catch (ClassNotFoundException e) {
+				throw new SearchException("Unable to find ShardingStrategy class " + shardingStrategyName + " for " + directoryProviderName, e);
+			}
+			catch (IllegalAccessException e) {
+				throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
+						+ " Be sure to have a no-arg constructor", e);
+			}
+			catch (InstantiationException e) {
+				throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
+						+ " Be sure to have a no-arg constructor", e);
+			}
+			catch (ClassCastException e) {
+				throw new SearchException("ShardingStrategy class does not implements DirecotryProviderShardingStrategy: "
+						+ shardingStrategyName, e);
+			}
+		}
+		shardingStrategy.init( shardingProperties, providers );
+
+		return new DirectoryProviders( shardingStrategy, providers );
+	}
+
+	private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps, SearchFactoryImplementor searchFactoryImplementor) {
 		String className = indexProps.getProperty( "directory_provider" );
 		if ( StringHelper.isEmpty( className ) ) {
 			className = DEFAULT_DIRECTORY_PROVIDER;
@@ -92,6 +148,9 @@
 			configureOptimizerStrategy(searchFactoryImplementor, indexProps, provider);
 			configureIndexingParameters(searchFactoryImplementor, indexProps, provider);
 			providers.add( provider );
+			if ( !searchFactoryImplementor.getLockableDirectoryProviders().containsKey( provider ) ) {
+				searchFactoryImplementor.getLockableDirectoryProviders().put( provider, new ReentrantLock() );
+			}
 			return provider;
 		}
 	}
@@ -186,26 +245,96 @@
 			}
 		}	
 		searchFactoryImplementor.addIndexingParmeters(provider, indexingParams);
-	}	
+	}
 
-	private static Properties getDirectoryProperties(Configuration cfg, String directoryProviderName) {
+	/**
+	 * Returns an array of directory properties
+	 * Properties are defaulted. For a given property name,
+	 * hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search
+	 * If the Index is not sharded, a single Properties is returned
+	 * If the index is sharded, the Properties index matches the shard index
+	 */
+	private static Properties[] getDirectoryProperties(Configuration cfg, String directoryProviderName) {
 		Properties props = cfg.getProperties();
 		String indexName = LUCENE_PREFIX + directoryProviderName;
-		Properties indexProps = new Properties();
-		Properties indexSpecificProps = new Properties();
+		//indexSpecificProperties[i] >> indexSpecificDefaultproperties >> defaultProperties
+		Properties defaultProperties = new Properties();
+		ArrayList<Properties> indexSpecificProps = new ArrayList<Properties>();
+		Properties indexSpecificDefaultProps = new Properties(defaultProperties);
 		for ( Map.Entry entry : props.entrySet() ) {
 			String key = (String) entry.getKey();
 			if ( key.startsWith( LUCENE_DEFAULT ) ) {
-				indexProps.setProperty( key.substring( LUCENE_DEFAULT.length() ), (String) entry.getValue() );
+				defaultProperties.setProperty( key.substring( LUCENE_DEFAULT.length() ), (String) entry.getValue() );
 			}
 			else if ( key.startsWith( indexName ) ) {
-				indexSpecificProps.setProperty( key.substring( indexName.length() ), (String) entry.getValue() );
+				String suffixedKey = key.substring( indexName.length() + 1 );
+				int nextDoc = suffixedKey.indexOf( '.' );
+				int index = -1;
+				if ( nextDoc != -1 ) {
+				    String potentialNbr = suffixedKey.substring( 0, nextDoc );
+					try {
+						index = Integer.parseInt( potentialNbr );
+					}
+					catch ( Exception e ) {
+						//just not a number
+						index = -1;
+					}
+				}
+				if (index == -1) {
+					indexSpecificDefaultProps.setProperty( suffixedKey, (String) entry.getValue() );
+				}
+				else {
+					String finalKeyName = suffixedKey.substring( nextDoc + 1 );
+					//ignore sharding strategy properties
+					if ( ! finalKeyName.startsWith( SHARDING_STRATEGY ) ) {
+						ensureListSize( indexSpecificProps, index + 1 );
+						Properties propertiesforIndex = indexSpecificProps.get( index );
+						if ( propertiesforIndex == null ) {
+							propertiesforIndex = new Properties( indexSpecificDefaultProps );
+							indexSpecificProps.set( index, propertiesforIndex );
+						}
+						propertiesforIndex.setProperty( finalKeyName, (String) entry.getValue() );
+					}
+				}
 			}
 		}
-		indexProps.putAll( indexSpecificProps );
-		return indexProps;
+		String nbrOfShardsString = indexSpecificDefaultProps.getProperty( NBR_OF_SHARDS );
+		int nbrOfShards = -1;
+		if ( nbrOfShardsString != null ) {
+			try {
+				nbrOfShards = Integer.parseInt( nbrOfShardsString );
+			}
+			catch (NumberFormatException e) {
+				throw new SearchException(indexName + "." + NBR_OF_SHARDS + " is not a number", e);
+			}
+		}
+		if ( nbrOfShards <= 0 && indexSpecificProps.size() == 0 ) {
+			//no shard (a sharded subindex has to have at least one property)
+			return new Properties[] { indexSpecificDefaultProps };
+		}
+		else {
+			//sharded
+			nbrOfShards = nbrOfShards >= indexSpecificDefaultProps.size() ?
+					nbrOfShards :
+					indexSpecificDefaultProps.size();
+			ensureListSize( indexSpecificProps, nbrOfShards );
+			for ( int index = 0 ; index < nbrOfShards ; index++ ) {
+				if ( indexSpecificProps.get( index ) == null ) {
+					indexSpecificProps.set( index, new Properties( indexSpecificDefaultProps ) );
+				}
+			}
+			return indexSpecificProps.toArray( new Properties[ indexSpecificProps.size() ] );
+		}
 	}
 
+	private static void ensureListSize(ArrayList<Properties> indexSpecificProps, int size) {
+		//ensure the index exists
+		indexSpecificProps.ensureCapacity( size );
+		while ( indexSpecificProps.size() < size ) {
+			indexSpecificProps.add(null);
+		}
+	}
+
 	private static String getDirectoryProviderName(XClass clazz, Configuration cfg) {
 		//yuk
 		ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
@@ -237,4 +366,24 @@
 					"Trying to extract the index name from a non @Indexed class: " + clazz.getName() );
 		}
 	}
+
+	public class DirectoryProviders {
+		private DirectoryProviderShardingStrategy shardingStrategy;
+		private DirectoryProvider[] providers;
+
+
+		public DirectoryProviders(DirectoryProviderShardingStrategy shardingStrategy, DirectoryProvider[] providers) {
+			this.shardingStrategy = shardingStrategy;
+			this.providers = providers;
+		}
+
+
+		public DirectoryProviderShardingStrategy getSelectionStrategy() {
+			return shardingStrategy;
+		}
+
+		public DirectoryProvider[] getProviders() {
+			return providers;
+		}
+	}
 }

Added: trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderShardingStrategy.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderShardingStrategy.java	                        (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/store/DirectoryProviderShardingStrategy.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -0,0 +1,18 @@
+//$Id$
+package org.hibernate.search.store;
+
+import java.io.Serializable;
+import java.util.Properties;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface DirectoryProviderShardingStrategy {
+	void init(Properties properties, DirectoryProvider[] providers);
+	DirectoryProvider[] getDirectoryProvidersForQuery();
+	DirectoryProvider getDirectoryProviderForAddition(Class entity, Serializable id, String idInString, Document document);
+	DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString);
+	DirectoryProvider[] getDirectoryProvidersForOptimization(Class entity);
+}

Added: trunk/HibernateExt/search/src/java/org/hibernate/search/store/IdHashShardingStrategy.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/store/IdHashShardingStrategy.java	                        (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/store/IdHashShardingStrategy.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -0,0 +1,46 @@
+//$Id$
+package org.hibernate.search.store;
+
+import java.util.Properties;
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * This implementation uses idInString as the hash key.
+ * 
+ * @author Emmanuel Bernard
+ */
+public class IdHashShardingStrategy implements DirectoryProviderShardingStrategy {
+	private DirectoryProvider[] providers;
+	public void init(Properties properties, DirectoryProvider[] providers) {
+		this.providers = providers;
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForQuery() {
+		return providers;
+	}
+
+	public DirectoryProvider getDirectoryProviderForAddition(Class entity, Serializable id, String idInString, Document document) {
+		return providers[ hashKey(idInString) ];
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString) {
+		return new DirectoryProvider[] { providers[ hashKey(idInString) ] };
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForOptimization(Class entity) {
+		return providers;
+	}
+
+	private int hashKey(String key) {
+		//reproduce the hashCode implementation of String as documented in the javadoc
+		// to be safe cross Java version (in case it changes some day)
+		int hash = 0;
+		int length = key.length();
+		for (int index = 0 ; index < length ; index++) {
+			hash = 31*hash + key.charAt( index );
+		}
+		return hash % providers.length;
+	}
+}

Added: trunk/HibernateExt/search/src/java/org/hibernate/search/store/NotShardedStrategy.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/store/NotShardedStrategy.java	                        (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/store/NotShardedStrategy.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -0,0 +1,37 @@
+//$Id$
+package org.hibernate.search.store;
+
+import java.util.Properties;
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class NotShardedStrategy implements DirectoryProviderShardingStrategy {
+	private DirectoryProvider[] directoryProvider;
+	public void init(Properties properties, DirectoryProvider[] providers) {
+		this.directoryProvider = providers;
+		if ( directoryProvider.length > 1) {
+			throw new AssertionFailure("Using SingleDirectoryProviderSelectionStrategy with multiple DirectryProviders");
+		}
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForQuery() {
+		return directoryProvider;
+	}
+
+	public DirectoryProvider getDirectoryProviderForAddition(Class entity, Serializable id, String idInString, Document document) {
+		return directoryProvider[0];
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString) {
+		return directoryProvider;
+	}
+
+	public DirectoryProvider[] getDirectoryProvidersForOptimization(Class entity) {
+		return directoryProvider;
+	}
+}

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -20,7 +20,7 @@
 	}
 
 	protected Directory getDirectory(Class clazz) {
-		return getLuceneEventListener().getSearchFactoryImplementor().getDirectoryProvider( clazz ).getDirectory();
+		return getLuceneEventListener().getSearchFactoryImplementor().getDirectoryProviders( clazz )[0].getDirectory();
 	}
 
 	private FullTextIndexEventListener getLuceneEventListener() {

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java	2007-07-29 14:12:13 UTC (rev 12843)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -57,7 +57,7 @@
 		doc.add( field );
 		field = new Field("logo", ts.getLogo(), Field.Store.NO, Field.Index.TOKENIZED );
 		doc.add( field );
-		LuceneWork luceneWork = new AddLuceneWork(ts.getId(), ts.getClass(), doc );
+		LuceneWork luceneWork = new AddLuceneWork(ts.getId(), String.valueOf( ts.getId() ), ts.getClass(), doc );
 		List<LuceneWork> queue = new ArrayList<LuceneWork>();
 		queue.add( luceneWork );
 

Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/Animal.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/Animal.java	                        (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/Animal.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -0,0 +1,40 @@
+//$Id$
+package org.hibernate.search.test.shards;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Animal")
+public class Animal {
+	@Id
+	@DocumentId
+	private Integer id;
+	@Field(index = Index.TOKENIZED)
+	private String name;
+
+
+	public Integer getId() {
+		return id;
+	}
+
+	public void setId(Integer id) {
+		this.id = id;
+	}
+
+	public String getName() {
+		return name;
+	}
+
+	public void setName(String name) {
+		this.name = name;
+	}
+}

Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/ShardsTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/ShardsTest.java	                        (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/shards/ShardsTest.java	2007-07-29 14:56:47 UTC (rev 12844)
@@ -0,0 +1,190 @@
+//$Id$
+package org.hibernate.search.test.shards;
+
+import java.io.File;
+import java.util.Properties;
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.store.IdHashShardingStrategy;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ShardsTest extends SearchTestCase {
+
+
+	protected void configure(Configuration cfg) {
+		super.configure( cfg );
+		cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+		File sub = getBaseIndexDir();
+		cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+		cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+		//is the default when multiple shards are set up
+		//cfg.setProperty( "hibernate.search.Animal.sharding_strategy", IdHashShardingStrategy.class );
+		cfg.setProperty( "hibernate.search.Animal.sharding_strategy.nbr_of_shards", "2" );
+		cfg.setProperty( "hibernate.search.Animal.0.indexName", "Animal00" );
+	}
+
+	public void testIdShardingStrategy() {
+		DirectoryProvider[] dps = new DirectoryProvider[] { new RAMDirectoryProvider(), new RAMDirectoryProvider() };
+		IdHashShardingStrategy shardingStrategy = new IdHashShardingStrategy();
+		shardingStrategy.init( null, dps);
+		assertTrue( dps[1] == shardingStrategy.getDirectoryProviderForAddition( Animal.class, 1, "1", null) );
+		assertTrue( dps[0] == shardingStrategy.getDirectoryProviderForAddition( Animal.class, 2, "2", null) );
+	}
+
+	public void testBehavior() throws Exception {
+		Session s = openSession( );
+		Transaction tx = s.beginTransaction();
+		Animal a = new Animal();
+		a.setId( 1 );
+		a.setName( "Elephant" );
+		s.persist( a );
+		a = new Animal();
+		a.setId( 2 );
+		a.setName( "Bear" );
+		s.persist( a );
+		tx.commit();
+
+		s.clear();
+
+		tx = s.beginTransaction();
+		a = (Animal) s.get(Animal.class, 1);
+		a.setName( "Mouse" );
+		tx.commit();
+
+		s.clear();
+
+		tx = s.beginTransaction();
+		FullTextSession fts = Search.createFullTextSession( s );
+		QueryParser parser = new QueryParser("id", new StopAnalyzer() );
+
+		List results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear" ) ).list();
+		assertEquals( "Either double insert, single update, or query fails with shards", 2, results.size() );
+		for (Object o : results) s.delete( o );
+		tx.commit();
+		s.close();
+	}
+
+	public void testInternalSharding() throws Exception {
+		Session s = openSession( );
+		Transaction tx = s.beginTransaction();
+		Animal a = new Animal();
+		a.setId( 1 );
+		a.setName( "Elephant" );
+		s.persist( a );
+		a = new Animal();
+		a.setId( 2 );
+		a.setName( "Bear" );
+		s.persist( a );
+		tx.commit();
+
+		s.clear();
+
+		IndexReader reader = IndexReader.open( new File( getBaseIndexDir(), "Animal00" ) );
+		try {
+			int num = reader.numDocs();
+			assertEquals( 1, num );
+		}
+		finally {
+			reader.close();
+		}
+		reader = IndexReader.open( new File( getBaseIndexDir(), "Animal_1" ) );
+		try {
+			int num = reader.numDocs();
+			assertEquals( 1, num );
+		}
+		finally {
+			reader.close();
+		}
+
+
+		tx = s.beginTransaction();
+		a = (Animal) s.get(Animal.class, 1);
+		a.setName( "Mouse" );
+		tx.commit();
+
+		s.clear();
+
+		reader = IndexReader.open( new File( getBaseIndexDir(), "Animal_1" ) );
+		try {
+			int num = reader.numDocs();
+			assertEquals( 1, num );
+			TermDocs docs = reader.termDocs( new Term( "name", "mouse" ) );
+			assertTrue( docs.next() );
+			org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+			assertFalse( docs.next() );
+		}
+		finally {
+			reader.close();
+		}
+
+		tx = s.beginTransaction();
+		FullTextSession fts = Search.createFullTextSession( s );
+		QueryParser parser = new QueryParser("id", new StopAnalyzer() );
+
+		List results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear" ) ).list();
+		assertEquals( "Either double insert, single update, or query fails with shards", 2, results.size() );
+		for (Object o : results) s.delete( o );
+		tx.commit();
+		s.close();
+	}
+
+	protected void setUp() throws Exception {
+		File sub = getBaseIndexDir();
+		sub.mkdir();
+		File[] files = sub.listFiles();
+		for (File file : files) {
+			if ( file.isDirectory() ) {
+				delete( file );
+			}
+		}
+		//super.setUp(); //we need a fresh session factory each time for index set up
+		buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+	}
+
+	private File getBaseIndexDir() {
+		File current = new File( "." );
+		File sub = new File( current, "indextemp" );
+		return sub;
+	}
+
+	protected void tearDown() throws Exception {
+		super.tearDown();
+		File sub = getBaseIndexDir();
+		delete( sub );
+	}
+
+	private void delete(File sub) {
+		if ( sub.isDirectory() ) {
+			for ( File file : sub.listFiles() ) {
+				delete( file );
+			}
+			sub.delete();
+		}
+		else {
+			sub.delete();
+		}
+	}
+
+	protected Class[] getMappings() {
+		return new Class[] {
+				Animal.class
+		};
+	}
+}




More information about the hibernate-commits mailing list