[hibernate-commits] Hibernate SVN: r13953 - in search/trunk/src/java/org/hibernate/search: backend and 6 other directories.

hibernate-commits at lists.jboss.org
Tue Aug 28 11:38:29 EDT 2007


Author: epbernard
Date: 2007-08-28 11:38:29 -0400 (Tue, 28 Aug 2007)
New Revision: 13953

Modified:
   search/trunk/src/java/org/hibernate/search/FullTextSession.java
   search/trunk/src/java/org/hibernate/search/backend/QueueingProcessor.java
   search/trunk/src/java/org/hibernate/search/backend/Work.java
   search/trunk/src/java/org/hibernate/search/backend/WorkType.java
   search/trunk/src/java/org/hibernate/search/backend/Worker.java
   search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
   search/trunk/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
   search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
   search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
   search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
   search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
   search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
   search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
   search/trunk/src/java/org/hibernate/search/store/IdHashShardingStrategy.java
   search/trunk/src/java/org/hibernate/search/store/IndexShardingStrategy.java
Log:
HSEARCH-69 purge and purge all

Modified: search/trunk/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/FullTextSession.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/FullTextSession.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -1,6 +1,8 @@
 //$Id$
 package org.hibernate.search;
 
+import java.io.Serializable;
+
 import org.hibernate.classic.Session;
 
 /**
@@ -26,4 +28,19 @@
 	 * return the SearchFactory
 	 */
 	SearchFactory getSearchFactory();
+
+	/**
+	 * Remove the entity with the given id and type from the index.
+	 *
+	 * @param entityType
+	 * @param id
+	 */
+	public void purge(Class entityType, Serializable id);
+
+	/**
+	 * Remove all entities of the given type from the index.
+	 *
+	 * @param entityType
+	 */
+	public void purge(Class entityType);
 }

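For reference, the two purge() methods added above are the user-facing API of this commit. A minimal usage sketch (the entity class and id are placeholders, error handling is omitted, and the helper class is illustrative, not part of the commit):

import java.io.Serializable;

import org.hibernate.Transaction;
import org.hibernate.search.FullTextSession;

public class PurgeUsageSketch {
	// entityType stands for any @Indexed entity class, e.g. Book.class
	public static void purge(FullTextSession fullTextSession, Class entityType, Serializable id) {
		Transaction tx = fullTextSession.beginTransaction();

		// remove the single document matching the given id from the index (the database row is untouched)
		fullTextSession.purge( entityType, id );

		// remove every document of that type from the index
		fullTextSession.purge( entityType );

		// when a transaction is in progress, purges are queued and applied at commit like any other index work
		tx.commit();
	}
}
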
Modified: search/trunk/src/java/org/hibernate/search/backend/QueueingProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/QueueingProcessor.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/QueueingProcessor.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -20,7 +20,7 @@
 	 * Add a work
 	 * TODO move that somewhere else, it does not really fit here
 	 */
-	void add(Object entity, Serializable id, WorkType workType, WorkQueue workQueue);
+	void add(Work work, WorkQueue workQueue);
 
 	/**
 	 * prepare resources for a later performWorks call

Modified: search/trunk/src/java/org/hibernate/search/backend/Work.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Work.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/Work.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -12,6 +12,7 @@
  */
 public class Work {
 	private Object entity;
+	private Class entityClass;
 	private Serializable id;
 	private XMember idGetter;
 	private WorkType type;
@@ -23,12 +24,21 @@
 		this.type = type;
 	}
 
+	public Work(Class entityType, Serializable id, WorkType type) {
+		this.entityClass = entityType;
+		this.id = id;
+		this.type = type;
+	}
 
 	public Work(Object entity, XMember idGetter, WorkType type) {
 		this.entity = entity;
 		this.idGetter = idGetter;
 		this.type = type;
 	}
+	
+	public Class getEntityClass() {
+		return entityClass;
+	}
 
 	public Object getEntity() {
 		return entity;

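The class-based constructor added above is what lets a purge request travel through the backend without a loaded entity instance. A hedged sketch of how the constructors are meant to be used (the helper class is illustrative, not part of the commit; the id == null convention comes from FullTextSessionImpl below):

import java.io.Serializable;

import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;

public class WorkConstructionSketch {
	// add/update/delete/index: the entity instance itself is available
	public static Work forEntity(Object entity, Serializable id, WorkType type) {
		return new Work( entity, id, type );
	}

	// purge: only the class (and optionally the id) is known, no entity is loaded
	public static Work forPurge(Class entityType, Serializable id) {
		return new Work( entityType, id, id == null ? WorkType.PURGE_ALL : WorkType.PURGE );
	}
}
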
Modified: search/trunk/src/java/org/hibernate/search/backend/WorkType.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/WorkType.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/WorkType.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -7,11 +7,22 @@
  * 
  * @author Emmanuel Bernard
  * @author Hardy Ferentschik
+ * @author John Griffin
  */
 public enum WorkType {
 	ADD,
 	UPDATE,
 	DELETE,
+	/**
+	 * Used to remove a specific instance
+	 * of a class from an index.
+	 */
+	PURGE,
+	/**
+	 * Used to remove all instances of a
+	 * class from an index.
+	 */
+	PURGE_ALL,
 	
 	/**
 	 * This type is used for batch indexing.

Modified: search/trunk/src/java/org/hibernate/search/backend/Worker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Worker.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/Worker.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -12,7 +12,7 @@
  * @author Emmanuel Bernard
  */
 public interface Worker {
-	void performWork(Object entity, Serializable id, WorkType workType, EventSource session);
+	void performWork(Work work, EventSource session);
 
 	void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor);
 }

Modified: search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -96,9 +96,8 @@
 		searchFactoryImplementor.setBackendQueueProcessorFactory( backendQueueProcessorFactory );
 	}
 
-	public void add(Object entity, Serializable id, WorkType workType, WorkQueue workQueue) {
+	public void add(Work work, WorkQueue workQueue) {
 		//don't check for builder it's done in prepareWork
-		Work work = new Work(entity, id, workType);
 		workQueue.add( work );
 		if ( batchSize > 0 && workQueue.size() >= batchSize ) {
 			WorkQueue subQueue = workQueue.splitQueue();
@@ -115,10 +114,12 @@
 		for ( int i = 0 ; i < initialSize ; i++ ) {
 			Work work = queue.get( i );
 			queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
-			Class entityClass = Hibernate.getClass( work.getEntity() );
+			Class entityClass = work.getEntityClass() != null ?
+						work.getEntityClass() :
+						Hibernate.getClass( work.getEntity() );
 			DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entityClass );
 			if ( builder == null ) return; //or exception?
-			builder.addWorkToQueue(work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
+			builder.addWorkToQueue(entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
 		}
 		workQueue.setSealedQueue( luceneQueue );
 	}

Modified: search/trunk/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -32,8 +32,8 @@
 		this.queuePerTransaction = queuePerTransaction;
 	}
 
-	public void add(Object entity, Serializable id, WorkType workType) {
-		queueingProcessor.add( entity, id, workType, queue );
+	public void add(Work work) {
+		queueingProcessor.add( work, queue );
 	}
 
 	public boolean isConsumed() {

Modified: search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -8,6 +8,7 @@
 import org.hibernate.search.backend.QueueingProcessor;
 import org.hibernate.search.backend.WorkType;
 import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.Work;
 import org.hibernate.search.backend.impl.BatchedQueueingProcessor;
 import org.hibernate.search.util.WeakIdentityHashMap;
 import org.hibernate.search.engine.SearchFactoryImplementor;
@@ -28,7 +29,7 @@
 	protected WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
 	private QueueingProcessor queueingProcessor;
 
-	public void performWork(Object entity, Serializable id, WorkType workType, EventSource session) {
+	public void performWork(Work work, EventSource session) {
 		if ( session.isTransactionInProgress() ) {
 			Transaction transaction = session.getTransaction();
 			PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
@@ -38,11 +39,11 @@
 				transaction.registerSynchronization( txSync );
 				synchronizationPerTransaction.put(transaction, txSync);
 			}
-			txSync.add( entity, id, workType );
+			txSync.add( work );
 		}
 		else {
 			WorkQueue queue = new WorkQueue(2); //one work can be split
-			queueingProcessor.add( entity, id, workType, queue );
+			queueingProcessor.add( work, queue );
 			queueingProcessor.prepareWorks( queue );
 			queueingProcessor.performWorks( queue );
 		}

Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -13,6 +13,7 @@
 import org.hibernate.search.backend.Workspace;
 import org.hibernate.search.backend.OptimizeLuceneWork;
 import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
 import org.hibernate.search.engine.SearchFactoryImplementor;
 import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.store.DirectoryProvider;
@@ -24,6 +25,7 @@
  *
  * @author Emmanuel Bernard
  * @author Hardy Ferentschik
+ * @author John Griffin
  */
 public class LuceneBackendQueueProcessor implements Runnable {
 	
@@ -51,7 +53,17 @@
 				DocumentBuilder documentBuilder = searchFactoryImplementor.getDocumentBuilders().get( work.getEntityClass() );
 				IndexShardingStrategy shardingStrategy = documentBuilder.getDirectoryProviderSelectionStrategy();
 
-				if ( AddLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
+				if ( PurgeAllLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
+					DirectoryProvider[] providers = shardingStrategy.getDirectoryProvidersForDeletion(
+							work.getEntityClass(),
+							work.getId(),
+							work.getIdInString()
+					);
+					for (DirectoryProvider provider : providers) {
+						queueWithFlatDPs.add( new LuceneWorker.WorkWithPayload( work, provider ) );
+					}
+				}
+				else if ( AddLuceneWork.class.isAssignableFrom( work.getClass() ) ) {
 					DirectoryProvider provider = shardingStrategy.getDirectoryProviderForAddition(
 							work.getEntityClass(),
 							work.getId(),

Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -18,6 +18,7 @@
 import org.hibernate.search.backend.LuceneWork;
 import org.hibernate.search.backend.OptimizeLuceneWork;
 import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
 import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.store.DirectoryProvider;
 
@@ -26,6 +27,7 @@
  *
  * @author Emmanuel Bernard
  * @author Hardy Ferentschik
+ * @author John Griffin
  */
 public class LuceneWorker {
 	private Workspace workspace;
@@ -45,6 +47,8 @@
 		}
 		else if ( OptimizeLuceneWork.class.isAssignableFrom( workClass ) ) {
 			performWork( (OptimizeLuceneWork) luceneWork.getWork(), luceneWork.getProvider() );
+		}else if ( PurgeAllLuceneWork.class.isAssignableFrom( workClass ) ) {
+			performWork( (PurgeAllLuceneWork) luceneWork.getWork(), luceneWork.getProvider() );
 		}
 		else {
 			throw new AssertionFailure( "Unknown work type: " + workClass );
@@ -129,6 +133,20 @@
 		}
 	}
 
+	public void performWork(PurgeAllLuceneWork work, DirectoryProvider provider) {
+		Class entity = work.getEntityClass();
+		if ( log.isTraceEnabled() )
+			log.trace( "purgeAll Lucene index: " + entity );
+		IndexReader reader = workspace.getIndexReader( provider, entity );
+		try {
+			Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, entity.getName() );
+			reader.deleteDocuments( term );
+		}
+		catch (Exception e) {
+			throw new SearchException( "Unable to purge all from Lucene index: " + entity, e );
+		}
+	}
+
 	public static class WorkWithPayload {
 		private LuceneWork work;
 		private DirectoryProvider provider;

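At the index level, the purgeAll work above amounts to deleting every document whose class field matches the entity name. The same idea against a raw Lucene directory, as a standalone sketch (directory and reader handling are simplified here and are not how the Workspace manages them):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;

import org.hibernate.search.engine.DocumentBuilder;

public class PurgeAllSketch {
	// delete every document indexed for the given entity type; returns the number of documents removed
	public static int purgeAll(Directory directory, Class entityType) throws Exception {
		IndexReader reader = IndexReader.open( directory );
		try {
			// each document built by DocumentBuilder stores the entity class name under CLASS_FIELDNAME
			return reader.deleteDocuments( new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() ) );
		}
		finally {
			reader.close();
		}
	}
}
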
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -40,6 +40,7 @@
 import org.hibernate.search.backend.DeleteLuceneWork;
 import org.hibernate.search.backend.LuceneWork;
 import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
 import org.hibernate.search.bridge.BridgeFactory;
 import org.hibernate.search.bridge.FieldBridge;
 import org.hibernate.search.bridge.TwoWayFieldBridge;
@@ -396,14 +397,18 @@
 		return value;
 	}
 
-	public void addWorkToQueue(T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
-		Class entityClass = Hibernate.getClass( entity );
+	//TODO could we use T instead of EntityClass?
+	public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
 		//TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition 
 		for (LuceneWork luceneWork : queue) {
-			//whatever the actual work, we should ignore
+			//work already queued for the same entity and id makes this one redundant
 			if ( luceneWork.getEntityClass() == entityClass
-					&& luceneWork.getId().equals( id ) ) {//find a way to use Type.equals(x,y)
-				return;
+					 ) {
+				Serializable currentId = luceneWork.getId();
+				if ( currentId != null  && currentId.equals( id ) ) { //find a way to use Type.equals(x,y)
+					return;
+				}
+				//TODO do something to avoid multiple PURGE ALL and OPTIMIZE
 			}
 
 		}
@@ -414,9 +419,12 @@
 			queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
 			searchForContainers = true;
 		}
-		else if ( workType == WorkType.DELETE ) {
+		else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
 			queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
 		}
+		else if ( workType == WorkType.PURGE_ALL ) {
+			queue.add( new PurgeAllLuceneWork( entityClass ) );
+		}
 		else if ( workType == WorkType.UPDATE ) {
 			Document doc = getDocument( entity, id );
 			/**
@@ -499,7 +507,7 @@
 	private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
 										 DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor) {
 		Serializable id = (Serializable) builder.getMemberValue( value, builder.idGetter );
-		builder.addWorkToQueue( value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
+		builder.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
 	}
 
 	public Document getDocument(T instance, Serializable id) {

Modified: search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -15,6 +15,7 @@
 import org.hibernate.event.PostUpdateEvent;
 import org.hibernate.event.PostUpdateEventListener;
 import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.Work;
 import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.engine.SearchFactoryImplementor;
 import org.hibernate.search.impl.SearchFactoryImpl;
@@ -77,6 +78,7 @@
 	}
 
 	private void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
-		searchFactoryImplementor.getWorker().performWork( entity, id, workType, event.getSession() );
+		Work work = new Work(entity, id, workType);
+		searchFactoryImplementor.getWorker().performWork( work, event.getSession() );
 	}
 }

Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -44,6 +44,7 @@
 import org.hibernate.search.FullTextQuery;
 import org.hibernate.search.SearchFactory;
 import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.Work;
 import org.hibernate.search.engine.DocumentBuilder;
 import org.hibernate.search.engine.SearchFactoryImplementor;
 import org.hibernate.search.query.FullTextQueryImpl;
@@ -55,6 +56,7 @@
  * Lucene full text search aware session.
  *
  * @author Emmanuel Bernard
+ * @author John Griffin
  */
 public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
 	private final Session session;
@@ -70,8 +72,8 @@
 
 	/**
 	 * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses)
-     * If entities is empty, include all indexed entities
-     * 
+	 * If entities is empty, include all indexed entities
+	 *
 	 * @param entities must be immutable for the lifetime of the query object
 	 */
 	public FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
@@ -79,6 +81,45 @@
 	}
 
 	/**
+	 * Remove all entities of the given type from the index.
+	 *
+	 * @param entityType
+	 */
+	public void purge(Class entityType) {
+		purge( entityType, null );
+	}
+
+	/**
+	 * Remove the entity with the given id and type from the index.
+	 *
+	 * @param entityType
+	 * @param id
+	 */
+	public void purge(Class entityType, Serializable id) {
+		if ( entityType == null ) return;
+		SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
+		// not strictly necessary, but it is a small optimization and it also
+		// verifies that the client passed an indexed entity type.
+		Map<Class, DocumentBuilder<Object>> builders = searchFactoryImplementor.getDocumentBuilders();
+		DocumentBuilder<Object> builder = builders.get( entityType );
+
+		if ( builder == null ) {
+			throw new IllegalArgumentException( entityType.getName() + " is not a mapped entity (don't forget to add @Indexed)" );
+		}
+		else {
+			WorkType type;
+			if ( id == null ) {
+				type = WorkType.PURGE_ALL;
+			}
+			else {
+				type = WorkType.PURGE;
+			}
+			Work work = new Work(entityType, id, type);
+			searchFactoryImplementor.getWorker().performWork( work, eventSource );
+		}
+	}
+
+	/**
 	 * (re)index an entity.
 	 * Non indexable entities are ignored
 	 * The entity must be associated with the session
@@ -94,7 +135,8 @@
 		DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
 		if ( builder != null ) {
 			Serializable id = session.getIdentifier( entity );
-			searchFactoryImplementor.getWorker().performWork( entity, id, WorkType.INDEX, eventSource );
+			Work work = new Work(entity, id, WorkType.INDEX);
+			searchFactoryImplementor.getWorker().performWork( work, eventSource );
 		}
 		//TODO
 		//need to add elements in a queue kept at the Session level

Modified: search/trunk/src/java/org/hibernate/search/store/IdHashShardingStrategy.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/IdHashShardingStrategy.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/store/IdHashShardingStrategy.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -26,6 +26,7 @@
 	}
 
 	public DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString) {
+		if ( idInString == null ) return providers;
 		return new DirectoryProvider[] { providers[ hashKey(idInString) ] };
 	}
 

Modified: search/trunk/src/java/org/hibernate/search/store/IndexShardingStrategy.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/IndexShardingStrategy.java	2007-08-27 21:48:51 UTC (rev 13952)
+++ search/trunk/src/java/org/hibernate/search/store/IndexShardingStrategy.java	2007-08-28 15:38:29 UTC (rev 13953)
@@ -29,6 +29,7 @@
 	DirectoryProvider getDirectoryProviderForAddition(Class entity, Serializable id, String idInString, Document document);
 	/**
 	 * return the DirectoryProvider(s) where the given entity is stored and where the deletion operation needs to be applied
+	 * id and idInString can be null. If they are null, all the directory providers that may contain the given entity type should be returned
 	 */
 	DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString);
 }

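The sentence added to the javadoc is the contract that IdHashShardingStrategy now honours: a null id/idInString signals a purge-all, so every shard that may hold documents of the entity type has to be returned. A minimal, self-contained sketch of that routing rule (the hash below is simplified and is not the one IdHashShardingStrategy actually uses):

import java.io.Serializable;

import org.hibernate.search.store.DirectoryProvider;

public class DeletionRoutingSketch {
	// pick the DirectoryProviders a deletion or purge should be applied to
	public static DirectoryProvider[] providersForDeletion(
			DirectoryProvider[] shards, Serializable id, String idInString) {
		// null id/idInString means "purge all": the operation must reach every shard
		if ( idInString == null ) return shards;
		// otherwise route to the single shard owning that id
		int index = ( idInString.hashCode() & Integer.MAX_VALUE ) % shards.length;
		return new DirectoryProvider[] { shards[ index ] };
	}
}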


