Hibernate SVN: r11040 - branches/Branch_3_2/Hibernate3/test/org/hibernate/test/joinfetch.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-01-15 10:40:44 -0500 (Mon, 15 Jan 2007)
New Revision: 11040
Modified:
branches/Branch_3_2/Hibernate3/test/org/hibernate/test/joinfetch/UserGroup.hbm.xml
Log:
workaround issue with Ingres (userName is a function)
Modified: branches/Branch_3_2/Hibernate3/test/org/hibernate/test/joinfetch/UserGroup.hbm.xml
===================================================================
--- branches/Branch_3_2/Hibernate3/test/org/hibernate/test/joinfetch/UserGroup.hbm.xml 2007-01-12 14:30:07 UTC (rev 11039)
+++ branches/Branch_3_2/Hibernate3/test/org/hibernate/test/joinfetch/UserGroup.hbm.xml 2007-01-15 15:40:44 UTC (rev 11040)
@@ -14,26 +14,19 @@
<class name="User" table="AuctionUsers">
<id name="name"/>
- <map name="groups" table="AuctionUsersGroups"
- fetch="join" order-by="groupName">
- <key column="userName"/>
- <map-key formula="groupName"
- type="string"/>
- <many-to-many column="groupName"
- class="Group"/>
+ <map name="groups" table="AuctionUsersGroups" fetch="join" order-by="groupName">
+ <key column="`userName`"/>
+ <map-key formula="groupName" type="string"/>
+ <many-to-many column="groupName" class="Group"/>
</map>
</class>
<class name="Group" table="AuctionGroups">
<id name="name"/>
- <map name="users" table="AuctionUsersGroups"
- fetch="join" order-by="userName"
- inverse="true" cascade="all">
+ <map name="users" table="AuctionUsersGroups" fetch="join" order-by="`userName`" inverse="true" cascade="all">
<key column="groupName"/>
- <map-key formula="userName"
- type="string"/>
- <many-to-many column="userName"
- class="User"/>
+ <map-key formula="`userName`" type="string"/>
+ <many-to-many column="`userName`" class="User"/>
</map>
</class>
18 years
Hibernate SVN: r11039 - branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-01-12 09:30:07 -0500 (Fri, 12 Jan 2007)
New Revision: 11039
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/MapBinder.java
Log:
ANN-529 remove as in subquery
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/MapBinder.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/MapBinder.java 2007-01-10 20:13:22 UTC (rev 11038)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/MapBinder.java 2007-01-12 14:30:07 UTC (rev 11039)
@@ -282,7 +282,8 @@
String alias = "$alias$";
StringBuilder fromAndWhereSb = new StringBuilder( " from " )
.append( associatedClass.getTable().getName() )
- .append(" as ").append(alias).append(" where ");
+ //.append(" as ") //Oracle doesn't support it in subqueries
+ .append(alias).append(" where ");
Iterator collectionTableColumns = element.getColumnIterator();
while ( collectionTableColumns.hasNext() ) {
Column colColumn = (Column) collectionTableColumns.next();
18 years
Hibernate SVN: r11038 - in branches/Branch_3_2/HibernateExt/metadata/src: java/org/hibernate/search/backend and 7 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-01-10 15:13:22 -0500 (Wed, 10 Jan 2007)
New Revision: 11038
Added:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java
Modified:
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
Log:
ANN-524 share event initialization state by creating a SearchFactory
ANN-519 support multiple queue workers
Added: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/SearchFactory.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -0,0 +1,138 @@
+//$Id: $
+package org.hibernate.search;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.WeakHashMap;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.search.Environment;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.backend.WorkerFactory;
+import org.hibernate.search.backend.Worker;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.reflection.ReflectionManager;
+import org.hibernate.reflection.XClass;
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.HibernateException;
+import org.hibernate.mapping.PersistentClass;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SearchFactory {
+ private static ThreadLocal<WeakHashMap<Configuration, SearchFactory>> contexts =
+ new ThreadLocal<WeakHashMap<Configuration, SearchFactory>>();
+ static {
+ contexts.set( new WeakHashMap<Configuration, SearchFactory>(2) );
+ }
+ private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
+ private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
+ new HashMap<DirectoryProvider, ReentrantLock>();
+ private Worker worker;
+
+ public SearchFactory(Configuration cfg) {
+ //yuk
+ ReflectionManager reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+ if ( analyzerClassName != null ) {
+ try {
+ analyzerClass = ReflectHelper.classForName( analyzerClassName );
+ }
+ catch (Exception e) {
+ throw new HibernateException(
+ "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
+ e
+ );
+ }
+ }
+ else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer analyzer;
+ try {
+ analyzer = (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new HibernateException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
+ );
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
+ }
+
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while ( iter.hasNext() ) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if ( mappedClass != null ) {
+ XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+ if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
+ if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+ lockableDirectoryProviders.put( provider, new ReentrantLock() );
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, analyzer, provider, reflectionManager
+ );
+
+ documentBuilders.put( mappedClass, documentBuilder );
+ }
+ }
+ }
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for ( DocumentBuilder builder : documentBuilders.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ WorkerFactory workerFactory = new WorkerFactory();
+ workerFactory.configure( cfg, this );
+ worker = workerFactory.createWorker();
+ }
+
+ //code doesn't have to be multithreaded because SF creation is not.
+ public static SearchFactory getSearchFactory(Configuration cfg) {
+ WeakHashMap<Configuration, SearchFactory> contextMap = contexts.get();
+ SearchFactory searchFactory = contextMap.get( cfg );
+ if ( searchFactory == null) {
+ searchFactory = new SearchFactory(cfg);
+
+ contextMap.put( cfg, searchFactory );
+ }
+ return searchFactory;
+ }
+
+
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
+
+ public void setDocumentBuilders(Map<Class, DocumentBuilder<Object>> documentBuilders) {
+ this.documentBuilders = documentBuilders;
+ }
+
+ public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
+ return lockableDirectoryProviders;
+ }
+
+ public void setLockableDirectoryProviders(Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ this.lockableDirectoryProviders = lockableDirectoryProviders;
+ }
+
+ public Worker getWorker() {
+ return worker;
+ }
+}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/QueueWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,13 +2,10 @@
package org.hibernate.search.backend;
import java.util.Properties;
-import java.util.Map;
import java.util.List;
-import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.search.backend.Work;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
/**
* Execute the work for a given queue
@@ -18,8 +15,7 @@
public interface QueueWorker extends Runnable {
void run();
- void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders);
+ void initialize(Properties props, SearchFactory searchFactory);
void setQueue(List<Work> queue);
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Worker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,14 +2,9 @@
package org.hibernate.search.backend;
import java.util.Properties;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantLock;
-import org.hibernate.event.AbstractEvent;
import org.hibernate.event.EventSource;
-import org.hibernate.cfg.Configuration;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
/**
* Perform work for a given session. This implementation has to be multi threaded
@@ -18,6 +13,5 @@
public interface Worker {
void performWork(Work work, EventSource session);
- void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders);
+ void initialize(Properties props, SearchFactory searchFactory);
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -8,6 +8,7 @@
import org.hibernate.cfg.Configuration;
import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.Environment;
import org.hibernate.util.StringHelper;
@@ -21,12 +22,11 @@
private Map<Class, DocumentBuilder<Object>> documentBuilders;
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Configuration cfg;
+ private SearchFactory searchFactory;
public void configure(Configuration cfg,
- Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
this.cfg = cfg;
}
@@ -67,7 +67,7 @@
throw new AnnotationException("Unable to instanciate worker class: " + impl, e );
}
}
- worker.initialize( props, documentBuilders, lockableDirectoryProviders );
+ worker.initialize( props, searchFactory );
return worker;
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/Workspace.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -15,6 +15,7 @@
import org.hibernate.HibernateException;
import org.hibernate.AssertionFailure;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
/**
@@ -31,26 +32,23 @@
//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene use abstract classes, not interfaces
public class Workspace {
private static Log log = LogFactory.getLog( Workspace.class );
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Map<DirectoryProvider, IndexReader> readers = new HashMap<DirectoryProvider, IndexReader>();
private Map<DirectoryProvider, IndexWriter> writers = new HashMap<DirectoryProvider, IndexWriter>();
private List<DirectoryProvider> lockedProviders = new ArrayList<DirectoryProvider>();
+ private SearchFactory searchFactory;
- public Workspace(Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public Workspace(SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
}
public DocumentBuilder getDocumentBuilder(Class entity) {
- return documentBuilders.get( entity );
+ return searchFactory.getDocumentBuilders().get( entity );
}
public IndexReader getIndexReader(Class entity) {
//TODO NPEs
- DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ DirectoryProvider provider = searchFactory.getDocumentBuilders().get( entity ).getDirectoryProvider();
//one cannot access a reader for update after a writer has been accessed
if ( writers.containsKey( provider ) )
throw new AssertionFailure("Tries to read for update a index while a writer is accessed" + entity);
@@ -68,7 +66,7 @@
}
public IndexWriter getIndexWriter(Class entity) {
- DirectoryProvider provider = documentBuilders.get( entity ).getDirectoryProvider();
+ DirectoryProvider provider = searchFactory.getDocumentBuilders().get( entity ).getDirectoryProvider();
//one has to close a reader for update before a writer is accessed
IndexReader reader = readers.get( provider );
if ( reader != null ) {
@@ -85,7 +83,7 @@
lockProvider( provider );
try {
writer = new IndexWriter(
- provider.getDirectory(), documentBuilders.get( entity ).getAnalyzer(), false
+ provider.getDirectory(), searchFactory.getDocumentBuilders().get( entity ).getAnalyzer(), false
); //have been created at init time
writers.put( provider, writer );
}
@@ -97,7 +95,7 @@
private void lockProvider(DirectoryProvider provider) {
//make sure to use a semaphore
- ReentrantLock lock = lockableDirectoryProviders.get( provider );
+ ReentrantLock lock = searchFactory.getLockableDirectoryProviders().get( provider );
//of course a given thread cannot have a race cond with itself
if ( !lock.isHeldByCurrentThread() ) {
lock.lock();
@@ -135,7 +133,7 @@
}
}
for ( DirectoryProvider provider : lockedProviders ) {
- lockableDirectoryProviders.get( provider ).unlock();
+ searchFactory.getLockableDirectoryProviders().get( provider ).unlock();
}
readers.clear();
writers.clear();
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedQueueWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,31 +2,27 @@
package org.hibernate.search.backend.impl;
import java.util.List;
-import java.util.Map;
import java.util.Properties;
import java.util.Collections;
import java.util.Comparator;
-import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.QueueWorker;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
/**
* @author Emmanuel Bernard
*/
-class BatchedQueueWorker implements QueueWorker {
+public class BatchedQueueWorker implements QueueWorker {
private List<Work> queue;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
+ private SearchFactory searchFactory;
public void run() {
Workspace workspace;
LuceneWorker worker;
- workspace = new Workspace( documentBuilders, lockableDirectoryProviders );
+ workspace = new Workspace( searchFactory );
worker = new LuceneWorker( workspace );
try {
deadlockFreeQueue(queue, workspace);
@@ -40,10 +36,8 @@
}
}
- public void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
}
public void setQueue(List<Work> queue) {
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/BatchedWorkQueue.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -3,16 +3,13 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.Properties;
-import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ExecutorService;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.UpdateWork;
@@ -29,16 +26,13 @@
public class BatchedWorkQueue implements WorkQueue {
private List<Work> queue = new ArrayList<Work>();
private boolean sync;
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private ExecutorService executorService;
private Properties properties;
+ private SearchFactory searchFactory;
- public BatchedWorkQueue(Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders,
+ public BatchedWorkQueue(SearchFactory searchFactory,
Properties properties) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ this.searchFactory = searchFactory;
this.properties = properties;
//default to sync if none defined
this.sync = ! "async".equalsIgnoreCase( properties.getProperty( Environment.WORKER_PREFIX + "type") );
@@ -69,7 +63,7 @@
//TODO implements parallel batchWorkers (one per Directory)
public void performWork() {
BatchedQueueWorker batchWorker = new BatchedQueueWorker();
- batchWorker.initialize( properties, documentBuilders, lockableDirectoryProviders );
+ batchWorker.initialize( properties, searchFactory );
batchWorker.setQueue( queue );
if (sync) {
batchWorker.run();
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -11,6 +11,7 @@
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.util.WeakIdentityHashMap;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.event.EventSource;
import org.hibernate.Transaction;
@@ -30,6 +31,7 @@
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
private Map<Class, DocumentBuilder<Object>> documentBuilders;
private Properties properties;
+ private SearchFactory searchFactory;
public void performWork(Work work, EventSource session) {
if ( session.isTransactionInProgress() ) {
@@ -37,7 +39,7 @@
PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
queuePerTransaction.get( transaction );
if ( txSync == null || txSync.isConsumed() ) {
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, properties );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, properties );
txSync = new PostTransactionWorkQueueSynchronization( workQueue, queuePerTransaction );
transaction.registerSynchronization( txSync );
queuePerTransaction.put(transaction, txSync);
@@ -45,17 +47,15 @@
txSync.add( work );
}
else {
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, properties );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, properties );
PostTransactionWorkQueueSynchronization sync = new PostTransactionWorkQueueSynchronization( workQueue );
sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
}
}
- public void initialize(Properties props, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
- this.documentBuilders = documentBuilders;
- this.lockableDirectoryProviders = lockableDirectoryProviders;
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ this.searchFactory = searchFactory;
this.properties = props;
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -26,9 +26,9 @@
import org.hibernate.search.bridge.BridgeFactory;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.TwoWayFieldBridge;
-import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.util.BinderHelper;
+import org.hibernate.search.SearchFactory;
import org.hibernate.reflection.ReflectionManager;
import org.hibernate.reflection.XAnnotatedElement;
import org.hibernate.reflection.XClass;
@@ -305,8 +305,8 @@
}
}
- public static Serializable getDocumentId(FullTextIndexEventListener listener, Class clazz, Document document) {
- DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
+ public static Serializable getDocumentId(SearchFactory searchFactory, Class clazz, Document document) {
+ DocumentBuilder builder = searchFactory.getDocumentBuilders().get( clazz );
if ( builder == null ) throw new HibernateException( "No Lucene configuration set up for: " + clazz.getName() );
return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -2,19 +2,10 @@
package org.hibernate.search.event;
import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
-import org.hibernate.HibernateException;
-import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.AbstractEvent;
import org.hibernate.event.Initializable;
@@ -24,21 +15,12 @@
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.search.Environment;
-import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.AddWork;
import org.hibernate.search.backend.DeleteWork;
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
-import org.hibernate.search.backend.Worker;
-import org.hibernate.search.backend.WorkerFactory;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.store.DirectoryProviderFactory;
-import org.hibernate.mapping.PersistentClass;
-import org.hibernate.reflection.ReflectionManager;
-import org.hibernate.reflection.XClass;
-import org.hibernate.util.ReflectHelper;
+import org.hibernate.search.SearchFactory;
/**
* This listener supports setting a parent directory for all generated index files.
@@ -52,92 +34,21 @@
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
PostUpdateEventListener, Initializable {
- protected ReflectionManager reflectionManager;
- protected Worker worker;
-
- //FIXME keeping this here is a bad decision since you might want to search indexes wo maintain it
- @Deprecated
- public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
- return documentBuilders;
- }
-
-
- private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
- //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
- new HashMap<DirectoryProvider, ReentrantLock>();
- private boolean initialized;
-
private static final Log log = LogFactory.getLog( FullTextIndexEventListener.class );
+ private SearchFactory searchFactory;
+
public void initialize(Configuration cfg) {
- if ( initialized ) return;
- //yuk
- reflectionManager = ( (AnnotationConfiguration) cfg ).createExtendedMappings().getReflectionManager();
+ searchFactory = SearchFactory.getSearchFactory( cfg );
+ }
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
- if ( analyzerClassName != null ) {
- try {
- analyzerClass = ReflectHelper.classForName( analyzerClassName );
- }
- catch (Exception e) {
- throw new HibernateException(
- "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
- e
- );
- }
- }
- else {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer analyzer;
- try {
- analyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new HibernateException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName
- );
- }
- catch (Exception e) {
- throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName );
- }
-
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
- while ( iter.hasNext() ) {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if ( mappedClass != null ) {
- XClass mappedXClass = reflectionManager.toXClass( mappedClass );
- if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
- DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg );
- if ( !lockableDirectoryProviders.containsKey( provider ) ) {
- lockableDirectoryProviders.put( provider, new ReentrantLock() );
- }
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
- mappedXClass, analyzer, provider, reflectionManager
- );
-
- documentBuilders.put( mappedClass, documentBuilder );
- }
- }
- }
- Set<Class> indexedClasses = documentBuilders.keySet();
- for ( DocumentBuilder builder : documentBuilders.values() ) {
- builder.postInitialize( indexedClasses );
- }
- WorkerFactory workerFactory = new WorkerFactory();
- workerFactory.configure( cfg, documentBuilders, lockableDirectoryProviders );
- worker = workerFactory.createWorker();
- initialized = true;
+ public SearchFactory getSearchFactory() {
+ return searchFactory;
}
public void onPostDelete(PostDeleteEvent event) {
- if ( documentBuilders.containsKey( event.getEntity().getClass() ) ) {
+ if ( searchFactory.getDocumentBuilders().containsKey( event.getEntity().getClass() ) ) {
DeleteWork work = new DeleteWork( event.getId(), event.getEntity().getClass() );
processWork( work, event );
}
@@ -145,7 +56,7 @@
public void onPostInsert(PostInsertEvent event) {
final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( entity.getClass() );
if ( builder != null ) {
Serializable id = event.getId();
Document doc = builder.getDocument( entity, id );
@@ -156,7 +67,7 @@
public void onPostUpdate(PostUpdateEvent event) {
final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( entity.getClass() );
if ( builder != null ) {
Serializable id = event.getId();
Document doc = builder.getDocument( entity, id );
@@ -166,10 +77,6 @@
}
private void processWork(Work work, AbstractEvent event) {
- worker.performWork( work, event.getSession() );
+ searchFactory.getWorker().performWork( work, event.getSession() );
}
-
- public Map<DirectoryProvider, ReentrantLock> getLockableDirectoryProviders() {
- return lockableDirectoryProviders;
- }
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -8,7 +8,6 @@
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import java.util.concurrent.locks.ReentrantLock;
import javax.transaction.Status;
@@ -25,18 +24,33 @@
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
+import org.hibernate.Interceptor;
+import org.hibernate.ScrollableResults;
+import org.hibernate.ScrollMode;
+import org.hibernate.event.EventListeners;
+import org.hibernate.loader.custom.CustomQuery;
+import org.hibernate.persister.entity.EntityPersister;
+import org.hibernate.jdbc.Batcher;
+import org.hibernate.jdbc.JDBCContext;
+import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.engine.QueryParameters;
+import org.hibernate.engine.EntityKey;
+import org.hibernate.engine.PersistenceContext;
import org.hibernate.impl.SessionImpl;
+import org.hibernate.impl.CriteriaImpl;
import org.hibernate.search.query.FullTextQueryImpl;
-import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.util.ContextHelper;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.backend.UpdateWork;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.impl.BatchedWorkQueue;
import org.hibernate.search.backend.impl.PostTransactionWorkQueueSynchronization;
-import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.FullTextSession;
import org.hibernate.stat.SessionStatistics;
import org.hibernate.type.Type;
@@ -47,7 +61,7 @@
*
* @author Emmanuel Bernard
*/
-public class FullTextSessionImpl implements FullTextSession {
+public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
private final SessionImpl session;
private PostTransactionWorkQueueSynchronization postTransactionWorkQueueSynch;
@@ -75,13 +89,13 @@
public void index(Object entity) {
if (entity == null) return;
Class clazz = entity.getClass();
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
- DocumentBuilder<Object> builder = listener.getDocumentBuilders().get( clazz );
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );
+ DocumentBuilder<Object> builder = searchFactory.getDocumentBuilders().get( clazz );
if ( builder != null ) {
Serializable id = session.getIdentifier( entity );
Document doc = builder.getDocument( entity, id );
UpdateWork work = new UpdateWork( id, entity.getClass(), doc );
- processWork( work, listener.getDocumentBuilders(), listener.getLockableDirectoryProviders() );
+ processWork( work, searchFactory );
}
//TODO
//need to add elements in a queue kept at the Session level
@@ -92,11 +106,10 @@
// this is an open discussion
}
- private void processWork(Work work, Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ private void processWork(Work work, SearchFactory searchFactory) {
if ( session.isTransactionInProgress() ) {
if ( postTransactionWorkQueueSynch == null || postTransactionWorkQueueSynch.isConsumed() ) {
- postTransactionWorkQueueSynch = createWorkQueueSync( documentBuilders, lockableDirectoryProviders);
+ postTransactionWorkQueueSynch = createWorkQueueSync(searchFactory);
session.getTransaction().registerSynchronization( postTransactionWorkQueueSynch );
}
postTransactionWorkQueueSynch.add( work );
@@ -104,17 +117,16 @@
else {
//no transaction work right away
PostTransactionWorkQueueSynchronization sync =
- createWorkQueueSync( documentBuilders, lockableDirectoryProviders );
+ createWorkQueueSync( searchFactory );
sync.add( work );
sync.afterCompletion( Status.STATUS_COMMITTED );
}
}
private PostTransactionWorkQueueSynchronization createWorkQueueSync(
- Map<Class, DocumentBuilder<Object>> documentBuilders,
- Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders) {
+ SearchFactory searchFactory) {
//FIXME should be harmonized with the WorkerFactory?
- WorkQueue workQueue = new BatchedWorkQueue( documentBuilders, lockableDirectoryProviders, new Properties() );
+ WorkQueue workQueue = new BatchedWorkQueue( searchFactory, new Properties() );
return new PostTransactionWorkQueueSynchronization( workQueue );
}
@@ -317,6 +329,156 @@
return session.getEnabledFilter( filterName );
}
+ public Interceptor getInterceptor() {
+ return session.getInterceptor();
+ }
+
+ public void setAutoClear(boolean enabled) {
+ session.setAutoClear( enabled );
+ }
+
+ public boolean isTransactionInProgress() {
+ return session.isTransactionInProgress();
+ }
+
+ public void initializeCollection(PersistentCollection collection, boolean writing) throws HibernateException {
+ session.initializeCollection( collection, writing );
+ }
+
+ public Object internalLoad(String entityName, Serializable id, boolean eager, boolean nullable)
+ throws HibernateException {
+ return session.internalLoad( entityName, id, eager, nullable );
+ }
+
+ public Object immediateLoad(String entityName, Serializable id) throws HibernateException {
+ return session.immediateLoad( entityName, id );
+ }
+
+ public long getTimestamp() {
+ return session.getTimestamp();
+ }
+
+ public SessionFactoryImplementor getFactory() {
+ return session.getFactory();
+ }
+
+ public Batcher getBatcher() {
+ return session.getBatcher();
+ }
+
+ public List list(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.list( query, queryParameters );
+ }
+
+ public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.iterate( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.scroll( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) {
+ return session.scroll( criteria, scrollMode );
+ }
+
+ public List list(CriteriaImpl criteria) {
+ return session.list( criteria );
+ }
+
+ public List listFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.listFilter( collection, filter, queryParameters );
+ }
+
+ public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.iterateFilter( collection, filter, queryParameters );
+ }
+
+ public EntityPersister getEntityPersister(String entityName, Object object) throws HibernateException {
+ return session.getEntityPersister( entityName, object );
+ }
+
+ public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException {
+ return session.getEntityUsingInterceptor( key );
+ }
+
+ public void afterTransactionCompletion(boolean successful, Transaction tx) {
+ session.afterTransactionCompletion( successful, tx );
+ }
+
+ public void beforeTransactionCompletion(Transaction tx) {
+ session.beforeTransactionCompletion( tx );
+ }
+
+ public Serializable getContextEntityIdentifier(Object object) {
+ return session.getContextEntityIdentifier( object );
+ }
+
+ public String bestGuessEntityName(Object object) {
+ return session.bestGuessEntityName( object );
+ }
+
+ public String guessEntityName(Object entity) throws HibernateException {
+ return session.guessEntityName( entity );
+ }
+
+ public Object instantiate(String entityName, Serializable id) throws HibernateException {
+ return session.instantiate( entityName, id );
+ }
+
+ public List listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) throws HibernateException {
+ return session.listCustomQuery( customQuery, queryParameters );
+ }
+
+ public ScrollableResults scrollCustomQuery(CustomQuery customQuery, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.scrollCustomQuery( customQuery, queryParameters );
+ }
+
+ public List list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) throws HibernateException {
+ return session.list( spec, queryParameters );
+ }
+
+ public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.scroll( spec, queryParameters );
+ }
+
+ public Object getFilterParameterValue(String filterParameterName) {
+ return session.getFilterParameterValue( filterParameterName );
+ }
+
+ public Type getFilterParameterType(String filterParameterName) {
+ return session.getFilterParameterType( filterParameterName );
+ }
+
+ public Map getEnabledFilters() {
+ return session.getEnabledFilters();
+ }
+
+ public int getDontFlushFromFind() {
+ return session.getDontFlushFromFind();
+ }
+
+ public EventListeners getListeners() {
+ return session.getListeners();
+ }
+
+ public PersistenceContext getPersistenceContext() {
+ return session.getPersistenceContext();
+ }
+
+ public int executeUpdate(String query, QueryParameters queryParameters) throws HibernateException {
+ return session.executeUpdate( query, queryParameters );
+ }
+
+ public int executeNativeUpdate(NativeSQLQuerySpecification specification, QueryParameters queryParameters)
+ throws HibernateException {
+ return session.executeNativeUpdate( specification, queryParameters );
+ }
+
public EntityMode getEntityMode() {
return session.getEntityMode();
}
@@ -337,6 +499,34 @@
return session.getNamedQuery( queryName );
}
+ public Query getNamedSQLQuery(String name) {
+ return session.getNamedSQLQuery( name );
+ }
+
+ public boolean isEventSource() {
+ return session.isEventSource();
+ }
+
+ public void afterScrollOperation() {
+ session.afterScrollOperation();
+ }
+
+ public void setFetchProfile(String name) {
+ session.setFetchProfile( name );
+ }
+
+ public String getFetchProfile() {
+ return session.getFetchProfile();
+ }
+
+ public JDBCContext getJDBCContext() {
+ return session.getJDBCContext();
+ }
+
+ public boolean isClosed() {
+ return session.isClosed();
+ }
+
public org.hibernate.Session getSession(EntityMode entityMode) {
return session.getSession( entityMode );
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -34,7 +34,7 @@
import org.hibernate.engine.query.ParameterMetadata;
import org.hibernate.impl.AbstractQueryImpl;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.util.ContextHelper;
/**
@@ -71,9 +71,9 @@
//user stop using it
//scrollable is better in this area
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
Hits hits = searcher.search( query );
@@ -85,7 +85,7 @@
Document document = hits.doc( index );
EntityInfo entityInfo = new EntityInfo();
entityInfo.clazz = DocumentBuilder.getDocumentClass( document );
- entityInfo.id = DocumentBuilder.getDocumentId( listener, entityInfo.clazz, document );
+ entityInfo.id = DocumentBuilder.getDocumentId( searchFactory, entityInfo.clazz, document );
entityInfos.add( entityInfo );
}
return new IteratorImpl( entityInfos, (Session) this.session );
@@ -105,9 +105,9 @@
public ScrollableResults scroll() throws HibernateException {
//keep the searcher open until the resultset is closed
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );;
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
Hits hits;
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
@@ -115,7 +115,7 @@
setResultSize( hits );
int first = first();
int max = max( first, hits );
- return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session, listener );
+ return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session, searchFactory );
}
catch (IOException e) {
try {
@@ -134,9 +134,9 @@
}
public List list() throws HibernateException {
- FullTextIndexEventListener listener = ContextHelper.getLuceneEventListener( session );;
+ SearchFactory searchFactory = ContextHelper.getSearchFactory( session );;
//find the directories
- Searcher searcher = buildSearcher( listener );
+ Searcher searcher = buildSearcher( searchFactory );
Hits hits;
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
@@ -149,7 +149,7 @@
for ( int index = first; index <= max; index++ ) {
Document document = hits.doc( index );
Class clazz = DocumentBuilder.getDocumentClass( document );
- Serializable id = DocumentBuilder.getDocumentId( listener, clazz, document );
+ Serializable id = DocumentBuilder.getDocumentId( searchFactory, clazz, document );
result.add( sess.load( clazz, id ) );
//use load to benefit from the batch-size
//we don't face proxy casting issues since the exact class is extracted from the index
@@ -210,8 +210,8 @@
}
//TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
- private Searcher buildSearcher(FullTextIndexEventListener listener) {
- Map<Class, DocumentBuilder<Object>> builders = listener.getDocumentBuilders();
+ private Searcher buildSearcher(SearchFactory searchFactory) {
+ Map<Class, DocumentBuilder<Object>> builders = searchFactory.getDocumentBuilders();
Set<Directory> directories = new HashSet<Directory>();
if ( classes == null || classes.length == 0 ) {
//no class means all classes
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -18,7 +18,7 @@
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
import org.hibernate.type.Type;
/**
@@ -31,11 +31,11 @@
private final int max;
private int current;
private final Session session;
- private final FullTextIndexEventListener listener;
private EntityInfo[] entityInfos;
+ private final SearchFactory searchFactory;
public ScrollableResultsImpl(
- Searcher searcher, Hits hits, int first, int max, Session session, FullTextIndexEventListener listener
+ Searcher searcher, Hits hits, int first, int max, Session session, SearchFactory searchFactory
) {
this.searcher = searcher;
this.hits = hits;
@@ -43,7 +43,7 @@
this.max = max;
this.current = first;
this.session = session;
- this.listener = listener;
+ this.searchFactory = searchFactory;
entityInfos = new EntityInfo[max - first + 1];
}
@@ -109,7 +109,7 @@
}
info.clazz = DocumentBuilder.getDocumentClass( document );
//FIXME should check that clazz match classes but this complexify a lot the firstResult/maxResult
- info.id = DocumentBuilder.getDocumentId( listener, info.clazz, document );
+ info.id = DocumentBuilder.getDocumentId( searchFactory, info.clazz, document );
entityInfos[current - first] = info;
}
return new Object[]{
Modified: branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/java/org/hibernate/search/util/ContextHelper.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -5,13 +5,14 @@
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.SearchFactory;
/**
* @author Emmanuel Bernard
*/
public abstract class ContextHelper {
- public static FullTextIndexEventListener getLuceneEventListener(SessionImplementor session) {
+ public static SearchFactory getSearchFactory(SessionImplementor session) {
PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
FullTextIndexEventListener listener = null;
//FIXME this sucks since we mandante the event listener use
@@ -22,6 +23,6 @@
}
}
if ( listener == null ) throw new HibernateException( "Lucene event listener not initialized" );
- return listener;
+ return listener.getSearchFactory();
}
}
Modified: branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java
===================================================================
--- branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2007-01-09 16:04:16 UTC (rev 11037)
+++ branches/Branch_3_2/HibernateExt/metadata/src/test/org/hibernate/search/test/TestCase.java 2007-01-10 20:13:22 UTC (rev 11038)
@@ -22,7 +22,7 @@
}
protected Directory getDirectory(Class clazz) {
- return getLuceneEventListener().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
+ return getLuceneEventListener().getSearchFactory().getDocumentBuilders().get( clazz ).getDirectoryProvider().getDirectory();
}
private FullTextIndexEventListener getLuceneEventListener() {
@@ -42,9 +42,9 @@
protected void configure(org.hibernate.cfg.Configuration cfg) {
cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
- FullTextIndexEventListener del = new FullTextIndexEventListener();
- cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
- cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
- cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
+ //FullTextIndexEventListener del = new FullTextIndexEventListener();
+ cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{ new FullTextIndexEventListener() } );
+ cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{ new FullTextIndexEventListener() } );
+ cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{ new FullTextIndexEventListener() } );
}
}
18 years
Hibernate SVN: r11037 - in trunk/Hibernate3: src/org/hibernate/type test/org/hibernate/test/ops
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-01-09 11:04:16 -0500 (Tue, 09 Jan 2007)
New Revision: 11037
Modified:
trunk/Hibernate3/src/org/hibernate/type/CollectionType.java
trunk/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml
trunk/Hibernate3/test/org/hibernate/test/ops/MergeTest.java
Log:
HHH-2292 : merge() : detached + bare collections
Modified: trunk/Hibernate3/src/org/hibernate/type/CollectionType.java
===================================================================
--- trunk/Hibernate3/src/org/hibernate/type/CollectionType.java 2007-01-09 16:03:50 UTC (rev 11036)
+++ trunk/Hibernate3/src/org/hibernate/type/CollectionType.java 2007-01-09 16:04:16 UTC (rev 11037)
@@ -414,8 +414,6 @@
SessionImplementor session) {
// TODO: does not work for EntityMode.DOM4J yet!
java.util.Collection result = ( java.util.Collection ) target;
- final boolean isPC = ( result instanceof PersistentCollection );
- final boolean wasOriginalDirty = ( original instanceof PersistentCollection && ( ( PersistentCollection ) original ).isDirty() );
result.clear();
// copy elements into newly empty target collection
@@ -425,8 +423,19 @@
result.add( elemType.replace( iter.next(), null, session, owner, copyCache ) );
}
- if ( result instanceof PersistentCollection && !wasOriginalDirty ) {
- ( ( PersistentCollection ) result ).clearDirty();
+ // if the original is a PersistentCollection, and that original
+ // was not flagged as dirty, then reset the target's dirty flag
+ // here after the copy operation.
+ // </p>
+ // One thing to be careful of here is a "bare" original collection
+ // in which case we should never ever ever reset the dirty flag
+ // on the target because we simply do not know...
+ if ( original instanceof PersistentCollection ) {
+ if ( result instanceof PersistentCollection ) {
+ if ( ! ( ( PersistentCollection ) original ).isDirty() ) {
+ ( ( PersistentCollection ) result ).clearDirty();
+ }
+ }
}
return result;
Modified: trunk/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml
===================================================================
--- trunk/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml 2007-01-09 16:03:50 UTC (rev 11036)
+++ trunk/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml 2007-01-09 16:04:16 UTC (rev 11037)
@@ -13,9 +13,7 @@
<id name="id">
<generator class="native"/>
</id>
- <list name="competitors"
- cascade="persist,merge"
- table="COMPET_ION_OR">
+ <list name="competitors" table="COMPET_ION_OR" cascade="persist,merge,delete">
<key column="TION_ID"/>
<list-index column="INDEX_COL"/>
<many-to-many class="Competitor" column="TOR_ID" />
Modified: trunk/Hibernate3/test/org/hibernate/test/ops/MergeTest.java
===================================================================
--- trunk/Hibernate3/test/org/hibernate/test/ops/MergeTest.java 2007-01-09 16:03:50 UTC (rev 11036)
+++ trunk/Hibernate3/test/org/hibernate/test/ops/MergeTest.java 2007-01-09 16:04:16 UTC (rev 11037)
@@ -570,28 +570,44 @@
cleanup();
}
- public void testMergeManyToManyWithColelctionDeference() throws Exception {
+ public void testMergeManyToManyWithCollectionDeference() throws Exception {
+ // setup base data...
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
Competition competition = new Competition();
- competition.getCompetitors().add( new Competitor("Name") );
+ competition.getCompetitors().add( new Competitor( "Name" ) );
competition.getCompetitors().add( new Competitor() );
competition.getCompetitors().add( new Competitor() );
- Session s = openSession( );
- Transaction tx = s.beginTransaction();
s.persist( competition );
- s.flush();
- s.clear();
+ tx.commit();
+ s.close();
+
+ // the competition graph is now detached:
+ // 1) create a new List reference to represent the competitors
+ s = openSession();
+ tx = s.beginTransaction();
List newComp = new ArrayList();
- newComp.add( competition.getCompetitors().get(0) );
+ Competitor originalCompetitor = ( Competitor ) competition.getCompetitors().get( 0 );
+ originalCompetitor.setName( "Name2" );
+ newComp.add( originalCompetitor );
newComp.add( new Competitor() );
- ( (Competitor) newComp.get(0) ).setName( "Name2" );
+ // 2) set that new List reference unto the Competition reference
competition.setCompetitors( newComp );
- competition = (Competition) s.merge( competition );
- s.flush();
- s.clear();
- competition = (Competition) s.get( Competition.class, competition.getId() );
+ // 3) attempt the merge
+ Competition competition2 = ( Competition ) s.merge( competition );
+ tx.commit();
+ s.close();
+
+ assertFalse( competition == competition2 );
+ assertFalse( competition.getCompetitors() == competition2.getCompetitors() );
+ assertEquals( 2, competition2.getCompetitors().size() );
+
+ s = openSession();
+ tx = s.beginTransaction();
+ competition = ( Competition ) s.get( Competition.class, competition.getId() );
assertEquals( 2, competition.getCompetitors().size() );
- assertEquals( "Name2", ( (Competitor) competition.getCompetitors().get(0) ).getName() );
- tx.rollback();
+ s.delete( competition );
+ tx.commit();
s.close();
cleanup();
@@ -600,14 +616,19 @@
private void cleanup() {
Session s = openSession();
s.beginTransaction();
- s.createQuery("delete from NumberedNode where parent is not null").executeUpdate();
- s.createQuery("delete from NumberedNode").executeUpdate();
- s.createQuery("delete from Node where parent is not null").executeUpdate();
- s.createQuery("delete from Node").executeUpdate();
- s.createQuery("delete from VersionedEntity where parent is not null").executeUpdate();
- s.createQuery("delete from VersionedEntity").executeUpdate();
- s.createQuery("delete from TimestampedEntity").executeUpdate();
+ s.createQuery( "delete from NumberedNode where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from NumberedNode" ).executeUpdate();
+ s.createQuery( "delete from Node where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from Node" ).executeUpdate();
+
+ s.createQuery( "delete from VersionedEntity where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from VersionedEntity" ).executeUpdate();
+ s.createQuery( "delete from TimestampedEntity" ).executeUpdate();
+
+ s.createQuery( "delete from Competitor" ).executeUpdate();
+ s.createQuery( "delete from Competition" ).executeUpdate();
+
Iterator itr = s.createQuery( "from Employer" ).list().iterator();
while ( itr.hasNext() ) {
final Employer employer = ( Employer ) itr.next();
18 years
Hibernate SVN: r11036 - in branches/Branch_3_2/Hibernate3: src/org/hibernate/type test/org/hibernate/test/ops
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-01-09 11:03:50 -0500 (Tue, 09 Jan 2007)
New Revision: 11036
Modified:
branches/Branch_3_2/Hibernate3/src/org/hibernate/type/CollectionType.java
branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml
branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/MergeTest.java
Log:
HHH-2292 : merge() : detached + bare collections
Modified: branches/Branch_3_2/Hibernate3/src/org/hibernate/type/CollectionType.java
===================================================================
--- branches/Branch_3_2/Hibernate3/src/org/hibernate/type/CollectionType.java 2007-01-09 13:48:44 UTC (rev 11035)
+++ branches/Branch_3_2/Hibernate3/src/org/hibernate/type/CollectionType.java 2007-01-09 16:03:50 UTC (rev 11036)
@@ -414,8 +414,6 @@
SessionImplementor session) {
// TODO: does not work for EntityMode.DOM4J yet!
java.util.Collection result = ( java.util.Collection ) target;
- final boolean isPC = ( result instanceof PersistentCollection );
- final boolean wasOriginalDirty = ( original instanceof PersistentCollection && ( ( PersistentCollection ) original ).isDirty() );
result.clear();
// copy elements into newly empty target collection
@@ -425,8 +423,19 @@
result.add( elemType.replace( iter.next(), null, session, owner, copyCache ) );
}
- if ( result instanceof PersistentCollection && !wasOriginalDirty ) {
- ( ( PersistentCollection ) result ).clearDirty();
+ // if the original is a PersistentCollection, and that original
+ // was not flagged as dirty, then reset the target's dirty flag
+ // here after the copy operation.
+ // </p>
+ // One thing to be careful of here is a "bare" original collection
+ // in which case we should never ever ever reset the dirty flag
+ // on the target because we simply do not know...
+ if ( original instanceof PersistentCollection ) {
+ if ( result instanceof PersistentCollection ) {
+ if ( ! ( ( PersistentCollection ) original ).isDirty() ) {
+ ( ( PersistentCollection ) result ).clearDirty();
+ }
+ }
}
return result;
Modified: branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml
===================================================================
--- branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml 2007-01-09 13:48:44 UTC (rev 11035)
+++ branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/Competition.hbm.xml 2007-01-09 16:03:50 UTC (rev 11036)
@@ -1,33 +1,31 @@
<?xml version="1.0"?>
-<!DOCTYPE hibernate-mapping PUBLIC
+<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!--
-
+
-->
<hibernate-mapping package="org.hibernate.test.ops">
-
+
<class name="Competition">
<id name="id">
<generator class="native"/>
</id>
- <list name="competitors"
- cascade="persist,merge"
- table="COMPET_ION_OR">
+ <list name="competitors" table="COMPET_ION_OR" cascade="persist,merge,delete">
<key column="TION_ID"/>
<list-index column="INDEX_COL"/>
<many-to-many class="Competitor" column="TOR_ID" />
</list>
</class>
-
+
<class name="Competitor">
<id name="id">
<generator class="native"/>
</id>
<property name="name"/>
</class>
-
+
</hibernate-mapping>
Modified: branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/MergeTest.java
===================================================================
--- branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/MergeTest.java 2007-01-09 13:48:44 UTC (rev 11035)
+++ branches/Branch_3_2/Hibernate3/test/org/hibernate/test/ops/MergeTest.java 2007-01-09 16:03:50 UTC (rev 11036)
@@ -570,28 +570,44 @@
cleanup();
}
- public void testMergeManyToManyWithColelctionDeference() throws Exception {
+ public void testMergeManyToManyWithCollectionDeference() throws Exception {
+ // setup base data...
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
Competition competition = new Competition();
- competition.getCompetitors().add( new Competitor("Name") );
+ competition.getCompetitors().add( new Competitor( "Name" ) );
competition.getCompetitors().add( new Competitor() );
competition.getCompetitors().add( new Competitor() );
- Session s = openSession( );
- Transaction tx = s.beginTransaction();
s.persist( competition );
- s.flush();
- s.clear();
+ tx.commit();
+ s.close();
+
+ // the competition graph is now detached:
+ // 1) create a new List reference to represent the competitors
+ s = openSession();
+ tx = s.beginTransaction();
List newComp = new ArrayList();
- newComp.add( competition.getCompetitors().get(0) );
+ Competitor originalCompetitor = ( Competitor ) competition.getCompetitors().get( 0 );
+ originalCompetitor.setName( "Name2" );
+ newComp.add( originalCompetitor );
newComp.add( new Competitor() );
- ( (Competitor) newComp.get(0) ).setName( "Name2" );
+ // 2) set that new List reference unto the Competition reference
competition.setCompetitors( newComp );
- competition = (Competition) s.merge( competition );
- s.flush();
- s.clear();
- competition = (Competition) s.get( Competition.class, competition.getId() );
+ // 3) attempt the merge
+ Competition competition2 = ( Competition ) s.merge( competition );
+ tx.commit();
+ s.close();
+
+ assertFalse( competition == competition2 );
+ assertFalse( competition.getCompetitors() == competition2.getCompetitors() );
+ assertEquals( 2, competition2.getCompetitors().size() );
+
+ s = openSession();
+ tx = s.beginTransaction();
+ competition = ( Competition ) s.get( Competition.class, competition.getId() );
assertEquals( 2, competition.getCompetitors().size() );
- assertEquals( "Name2", ( (Competitor) competition.getCompetitors().get(0) ).getName() );
- tx.rollback();
+ s.delete( competition );
+ tx.commit();
s.close();
cleanup();
@@ -600,14 +616,19 @@
private void cleanup() {
Session s = openSession();
s.beginTransaction();
- s.createQuery("delete from NumberedNode where parent is not null").executeUpdate();
- s.createQuery("delete from NumberedNode").executeUpdate();
- s.createQuery("delete from Node where parent is not null").executeUpdate();
- s.createQuery("delete from Node").executeUpdate();
- s.createQuery("delete from VersionedEntity where parent is not null").executeUpdate();
- s.createQuery("delete from VersionedEntity").executeUpdate();
- s.createQuery("delete from TimestampedEntity").executeUpdate();
+ s.createQuery( "delete from NumberedNode where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from NumberedNode" ).executeUpdate();
+ s.createQuery( "delete from Node where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from Node" ).executeUpdate();
+
+ s.createQuery( "delete from VersionedEntity where parent is not null" ).executeUpdate();
+ s.createQuery( "delete from VersionedEntity" ).executeUpdate();
+ s.createQuery( "delete from TimestampedEntity" ).executeUpdate();
+
+ s.createQuery( "delete from Competitor" ).executeUpdate();
+ s.createQuery( "delete from Competition" ).executeUpdate();
+
Iterator itr = s.createQuery( "from Employer" ).list().iterator();
while ( itr.hasNext() ) {
final Employer employer = ( Employer ) itr.next();
18 years
Hibernate SVN: r11035 - trunk/Hibernate3/lib
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-01-09 08:48:44 -0500 (Tue, 09 Jan 2007)
New Revision: 11035
Removed:
trunk/Hibernate3/lib/ehcache-1.2.jar
Log:
oops, old ehcache did not get removed...
Deleted: trunk/Hibernate3/lib/ehcache-1.2.jar
===================================================================
(Binary files differ)
18 years
Hibernate SVN: r11034 - in branches/HQL_ANTLR_2/Hibernate3: src/org/hibernate/hql/ast/resolve src/org/hibernate/hql/ast/util test/org/hibernate/test/hql test/org/hibernate/test/hql/redesign test/org/hibernate/test/hql/util
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-01-09 08:43:12 -0500 (Tue, 09 Jan 2007)
New Revision: 11034
Added:
branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/util/
branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/util/PathHelperTest.java
Modified:
branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java
branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/ASTUtil.java
branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/PathHelper.java
branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ResolverTest.java
Log:
cleanup
Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java
===================================================================
--- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java 2007-01-09 11:39:15 UTC (rev 11033)
+++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java 2007-01-09 13:43:12 UTC (rev 11034)
@@ -254,18 +254,6 @@
throw new QueryException( "Unrecognized join type [" + joinType.getText() + "]" );
}
- protected String reconstitutePathString(AST propertyReference) {
- AST child = propertyReference.getFirstChild();
- String prefix = "";
- StringBuffer buffer = new StringBuffer();
- while ( child != null ) {
- buffer.append( prefix ).append( child.getText() );
- prefix = ".";
- child = child.getNextSibling();
- }
- return buffer.toString();
- }
-
protected AST handleSelectedPropertyRef(AST propertyRef) {
if ( propertyRef.getType() == PROPERTY_REF ) {
PropertyReference ref = ( PropertyReference ) propertyRef;
Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/ASTUtil.java
===================================================================
--- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/ASTUtil.java 2007-01-09 11:39:15 UTC (rev 11033)
+++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/ASTUtil.java 2007-01-09 13:43:12 UTC (rev 11034)
@@ -11,47 +11,61 @@
/**
* Provides utility methods for AST traversal and manipulation.
*
- * @author Joshua Davis (pgmjsd(a)sourceforge.net)
+ * @author Joshua Davis
+ * @author Steve Ebersole
*/
public final class ASTUtil {
/**
- * Private empty constructor.
- * (or else checkstyle says: 'warning: Utility classes should not have a public or default constructor.')
+ * Disallow instantiation.
*
- * @deprecated (tell clover to ignore this)
+	 * @deprecated (tell clover to ignore this)
*/
private ASTUtil() {
}
/**
* Creates a single node AST.
+ * <p/>
+ * TODO : remove this; this is silly...
*
* @param astFactory The factory.
- * @param type The node type.
- * @param text The node text.
+ * @param type The node type.
+ * @param text The node text.
+ *
* @return AST - A single node tree.
*/
public static AST create(ASTFactory astFactory, int type, String text) {
- AST node = astFactory.create( type, text );
- return node;
+ return astFactory.create( type, text );
}
/**
- * Creates a single node AST as a sibling.
+ * Creates a single node AST as a sibling of the passed prevSibling,
+ * taking care to reorganize the tree correctly to account for this
+ * newly created node.
*
- * @param astFactory The factory.
- * @param type The node type.
- * @param text The node text.
+ * @param astFactory The factory.
+ * @param type The node type.
+ * @param text The node text.
* @param prevSibling The previous sibling.
- * @return AST - A single node tree.
+ *
+ * @return The created AST node.
*/
public static AST createSibling(ASTFactory astFactory, int type, String text, AST prevSibling) {
AST node = astFactory.create( type, text );
- node.setNextSibling( prevSibling.getNextSibling() );
- prevSibling.setNextSibling( node );
- return node;
+ return insertSibling( node, prevSibling );
}
+ /**
+ * Inserts a node into a child subtree as a particularly positioned
+ * sibling taking care to properly reorganize the tree to account for this
+ * new addition.
+ *
+ * @param node The node to insert
+ * @param prevSibling The previous node at the sibling position
+ * where we want this node inserted.
+ *
+ * @return The return is the same as the node parameter passed in.
+ */
public static AST insertSibling(AST node, AST prevSibling) {
node.setNextSibling( prevSibling.getNextSibling() );
prevSibling.setNextSibling( node );
@@ -62,11 +76,12 @@
* Creates a 'binary operator' subtree, given the information about the
* parent and the two child nodex.
*
- * @param factory The AST factory.
+ * @param factory The AST factory.
* @param parentType The type of the parent node.
* @param parentText The text of the parent node.
- * @param child1 The first child.
- * @param child2 The second child.
+ * @param child1 The first child.
+ * @param child2 The second child.
+ *
* @return AST - A new sub-tree of the form "(parent child1 child2)"
*/
public static AST createBinarySubtree(ASTFactory factory, int parentType, String parentText, AST child1, AST child2) {
@@ -79,10 +94,11 @@
* Creates a single parent of the specified child (i.e. a 'unary operator'
* subtree).
*
- * @param factory The AST factory.
+ * @param factory The AST factory.
* @param parentType The type of the parent node.
* @param parentText The text of the parent node.
- * @param child The child.
+ * @param child The child.
+ *
* @return AST - A new sub-tree of the form "(parent child)"
*/
public static AST createParent(ASTFactory factory, int parentType, String parentText, AST child) {
@@ -107,7 +123,8 @@
* Finds the first node of the specified type in the chain of children.
*
* @param parent The parent
- * @param type The type to find.
+ * @param type The type to find.
+ *
* @return The first node of the specified type, or null if not found.
*/
public static AST findTypeInChildren(AST parent, int type) {
@@ -122,6 +139,7 @@
* Returns the last direct child of 'n'.
*
* @param n The parent
+ *
* @return The last direct child of 'n'.
*/
public static AST getLastChild(AST n) {
@@ -132,6 +150,7 @@
* Returns the last sibling of 'a'.
*
* @param a The sibling.
+ *
* @return The last sibling of 'a'.
*/
private static AST getLastSibling(AST a) {
@@ -147,6 +166,7 @@
* Returns the 'list' representation with some brackets around it for debugging.
*
* @param n The tree.
+ *
* @return The list representation of the tree.
*/
public static String getDebugString(AST n) {
@@ -161,7 +181,8 @@
* Find the previous sibling in the parent for the given child.
*
* @param parent the parent node
- * @param child the child to find the previous sibling of
+ * @param child the child to find the previous sibling of
+ *
* @return the previous sibling of the child
*/
public static AST findPreviousSibling(AST parent, AST child) {
@@ -181,7 +202,7 @@
* Makes the child node a sibling of the parent, reconnecting all siblings.
*
* @param parent the parent
- * @param child the child
+ * @param child the child
*/
public static void makeSiblingOfParent(AST parent, AST child) {
AST prev = findPreviousSibling( parent, child );
@@ -230,7 +251,7 @@
* Inserts the child as the first child of the parent, all other children are shifted over to the 'right'.
*
* @param parent the parent
- * @param child the new first child
+ * @param child the new first child
*/
public static void insertChild(AST parent, AST child) {
if ( parent.getFirstChild() == null ) {
@@ -243,6 +264,13 @@
}
}
+ private static ASTArray createAstArray(ASTFactory factory, int size, int parentType, String parentText, AST child1) {
+ ASTArray array = new ASTArray( size );
+ array.add( factory.create( parentType, parentText ) );
+ array.add( child1 );
+ return array;
+ }
+
/**
* Filters nodes out of a tree.
*/
@@ -251,6 +279,7 @@
* Returns true if the node should be filtered out.
*
* @param n The node.
+ *
* @return true if the node should be filtered out, false to keep the node.
*/
boolean exclude(AST n);
@@ -267,17 +296,7 @@
public abstract boolean include(AST node);
}
- private static ASTArray createAstArray(ASTFactory factory, int size, int parentType, String parentText, AST child1) {
- ASTArray array = new ASTArray( size );
- array.add( factory.create( parentType, parentText ) );
- array.add( child1 );
- return array;
- }
-
public static List collectChildren(AST root, FilterPredicate predicate) {
-// List children = new ArrayList();
-// collectChildren( children, root, predicate );
-// return children;
return new CollectingNodeVisitor( predicate ).collect( root );
}
@@ -305,14 +324,4 @@
return collectedNodes;
}
}
-
- private static void collectChildren(List children, AST root, FilterPredicate predicate) {
- for ( AST n = root.getFirstChild(); n != null; n = n.getNextSibling() ) {
- if ( predicate == null || !predicate.exclude( n ) ) {
- children.add( n );
- }
- collectChildren( children, n, predicate );
- }
- }
-
}
Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/PathHelper.java
===================================================================
--- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/PathHelper.java 2007-01-09 11:39:15 UTC (rev 11033)
+++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/util/PathHelper.java 2007-01-09 13:43:12 UTC (rev 11034)
@@ -11,9 +11,12 @@
import org.apache.commons.logging.LogFactory;
/**
- * Provides utility methods for paths.
+ * Provides utility methods for dealing with path expressions.
+ * <p/>
+ * Note that these utilities do not properly account for index operations.
*
- * @author josh Sep 14, 2004 8:16:29 AM
+ * @author Joshua Davis
+ * @author Steve Ebersole
*/
public final class PathHelper {
@@ -48,6 +51,37 @@
return lhs;
}
+ /**
+ * Provides the inverse functionality of {@link #parsePath}. In other words, for any path
+ * 'p' not involving index operations, p == reconstitutePathString( parsePath( p, someASTFactory ) ).
+ *
+ * @param pathAST The path AST structure.
+ * @return The corresponding path string.
+ */
+ public static String rebuildPathExpression(AST pathAST) {
+ final StringBuffer buffer = new StringBuffer();
+ visitExpression( pathAST, buffer );
+ return buffer.toString();
+ }
+
+ private static void visitExpression(AST expression, StringBuffer buffer) {
+ if ( HqlSqlTokenTypes.DOT == expression.getType() ) {
+ visitDot( expression.getFirstChild(), expression.getFirstChild().getNextSibling(), buffer );
+ }
+ else if ( HqlSqlTokenTypes.IDENT == expression.getType() ) {
+ visitIdent( expression, buffer );
+ }
+ }
+
+ private static void visitDot(AST lhs, AST rhs, StringBuffer buffer) {
+ visitExpression( lhs, buffer );
+ buffer.append( '.' ).append( rhs.getText() );
+ }
+
+ private static void visitIdent(AST ident, StringBuffer buffer) {
+ buffer.append( ident.getText() );
+ }
+
public static String getAlias(String path) {
return StringHelper.root( path );
}
Modified: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ResolverTest.java
===================================================================
--- branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ResolverTest.java 2007-01-09 11:39:15 UTC (rev 11033)
+++ branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ResolverTest.java 2007-01-09 13:43:12 UTC (rev 11034)
@@ -228,7 +228,7 @@
}
}
public static void assertJoinCount(int expected, AST tree) {
- JoinCounter.assertJoinCount( "incorrect join count", expected, tree );
+ JoinCounter.assertJoinCount( "unexpected join count", expected, tree );
}
public static void assertJoinCount(String failMessage, int expected, AST tree) {
JoinCounter counter = new JoinCounter();
@@ -246,7 +246,7 @@
}
}
public static void assertSelectExpressionCount(int expected, AST tree) {
- SelectExpressionCounter.assertSelectExpressionCount( "incorrect select expression count", expected, tree );
+ SelectExpressionCounter.assertSelectExpressionCount( "unexpected select expression count", expected, tree );
}
public static void assertSelectExpressionCount(String failMessage, int expected, AST tree) {
SelectExpressionCounter counter = new SelectExpressionCounter();
Added: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/util/PathHelperTest.java
===================================================================
--- branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/util/PathHelperTest.java 2007-01-09 11:39:15 UTC (rev 11033)
+++ branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/util/PathHelperTest.java 2007-01-09 13:43:12 UTC (rev 11034)
@@ -0,0 +1,28 @@
+package org.hibernate.test.hql.util;
+
+import junit.framework.TestCase;
+
+import org.hibernate.hql.ast.util.PathHelper;
+import org.hibernate.hql.ast.util.ASTPrinter;
+import org.hibernate.hql.ast.HqlASTFactory;
+import org.hibernate.hql.antlr.HqlTokenTypes;
+
+import antlr.collections.AST;
+
+/**
+ * {@inheritDoc}
+ *
+ * @author Steve Ebersole
+ */
+public class PathHelperTest extends TestCase {
+
+ private HqlASTFactory astFactory = new HqlASTFactory();
+ private ASTPrinter printer = new ASTPrinter( HqlTokenTypes.class );
+
+ public void testSimpleDotStructures() {
+ String path = "a.b.c";
+ AST pathAST = PathHelper.parsePath( path, astFactory );
+ System.out.println( printer.showAsString( pathAST, "Path AST" ) );
+ assertEquals( path, PathHelper.rebuildPathExpression( pathAST ) );
+ }
+}
18 years
Hibernate SVN: r11033 - tags/TOOLS_3_2_0_BETA9
by hibernate-commits@lists.jboss.org
Author: max.andersen(a)jboss.com
Date: 2007-01-09 06:39:15 -0500 (Tue, 09 Jan 2007)
New Revision: 11033
Added:
tags/TOOLS_3_2_0_BETA9/HibernateExt/
Log:
(proper) 3.2.0b9 tag
Copied: tags/TOOLS_3_2_0_BETA9/HibernateExt (from rev 11032, branches/Branch_3_2/HibernateExt)
18 years
Hibernate SVN: r11032 - in branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool: hbm2x test
by hibernate-commits@lists.jboss.org
Author: max.andersen(a)jboss.com
Date: 2007-01-09 06:33:19 -0500 (Tue, 09 Jan 2007)
New Revision: 11032
Modified:
branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/DocExporterTest.java
branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/test/TestHelper.java
Log:
teardown and less strict java code check (useless type casts are allowed)
Modified: branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/DocExporterTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/DocExporterTest.java 2007-01-09 11:25:49 UTC (rev 11031)
+++ branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/DocExporterTest.java 2007-01-09 11:33:19 UTC (rev 11032)
@@ -40,9 +40,7 @@
exporter.start();
}
- protected void tearDown() throws Exception {
- super.tearDown();
- }
+
public void testExporter() {
assertFileAndExists(new File(getOutputDir(), "index.html") );
Modified: branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/test/TestHelper.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/test/TestHelper.java 2007-01-09 11:25:49 UTC (rev 11031)
+++ branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/test/TestHelper.java 2007-01-09 11:33:19 UTC (rev 11032)
@@ -72,7 +72,8 @@
togglesList.add( "-" + jdktarget ); // put this here so DAOs compile
togglesList.add( "-noExit" );
//togglesList.add( "-noWarn" );
- togglesList.add( "-warn:unusedImport,noEffectAssign,fieldHiding,localHiding,semicolon,uselessTypeCheck" ); // TODO: unused private
+ //togglesList.add( "-warn:unusedImport,noEffectAssign,fieldHiding,localHiding,semicolon,uselessTypeCheck" ); // TODO: unused private
+ togglesList.add( "-warn:unusedImport,noEffectAssign,fieldHiding,localHiding,semicolon" ); // TODO: unused private
togglesList.add( "-sourcepath" );
togglesList.add( srcdir.getAbsolutePath() + File.separatorChar );
togglesList.add( "-d" );
18 years
Hibernate SVN: r11031 - branches/Branch_3_2/HibernateExt/tools/etc
by hibernate-commits@lists.jboss.org
Author: max.andersen(a)jboss.com
Date: 2007-01-09 06:25:49 -0500 (Tue, 09 Jan 2007)
New Revision: 11031
Modified:
branches/Branch_3_2/HibernateExt/tools/etc/hibernate.properties
Log:
align
Modified: branches/Branch_3_2/HibernateExt/tools/etc/hibernate.properties
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/etc/hibernate.properties 2007-01-09 11:19:24 UTC (rev 11030)
+++ branches/Branch_3_2/HibernateExt/tools/etc/hibernate.properties 2007-01-09 11:25:49 UTC (rev 11031)
@@ -24,7 +24,7 @@
#hibernate.connection.driver_class org.h2.Driver
#hibernate.connection.username sa
#hibernate.connection.password
-#hibernate.connection.url jdbc:h2:testdb/h2test
+#hibernate.connection.url jdbc:h2:testdb/h2db;LOCK_MODE=0;TRACE_LEVEL_SYSTEM_OUT=2
#hibernate.connection.url jdbc:h2:mem:
#hibernate.connection.url jdbc:h2:tcp://dbserv:8084/sample;
#hibernate.connection.url jdbc:h2:ssl://secureserv:8085/sample;
@@ -40,7 +40,7 @@
#hibernate.connection.url jdbc:hsqldb:hsql://localhost
hibernate.connection.url jdbc:hsqldb:testdb/hsqldb;shutdown=true
#hibernate.connection.url jdbc:hsqldb:.
-hibernate.connection.shutdown=true
+#hibernate.connection.shutdown=true
## Derby
@@ -69,6 +69,8 @@
#hibernate.dialect org.hibernate.dialect.Oracle9Dialect
#hibernate.connection.driver_class oracle.jdbc.driver.OracleDriver
#hibernate.connection.url jdbc:oracle:thin:@localhost:1521:XE
+#hibernate.connection.username hr
+#hibernate.connection.password pluto123
## PostgreSQL
@@ -315,8 +317,8 @@
## specify a default schema and catalog for unqualified tablenames
+#hibernate.default_schema HR
hibernate.default_schema PUBLIC
-#hibernate.default_catalog PUBLIC
## set the maximum depth of the outer join fetch tree
@@ -436,3 +438,4 @@
#hibernate.jndi.class com.ibm.websphere.naming.WsnInitialContextFactory
#hibernate.jndi.url iiop://localhost:900/
+hibernatetool.metadatadialect org.hibernate.cfg.reveng.dialect.H2MetaDataDialect
\ No newline at end of file
18 years