Hibernate SVN: r11585 - in trunk/Hibernate3: code and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:11:07 -0400 (Tue, 29 May 2007)
New Revision: 11585
Added:
trunk/Hibernate3/code/cache-swarmcache/
Removed:
trunk/Hibernate3/cache-swarmcache/
Log:
split doc/code
Copied: trunk/Hibernate3/code/cache-swarmcache (from rev 11584, trunk/Hibernate3/cache-swarmcache)
Hibernate SVN: r11584 - in trunk/Hibernate3: code and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:10:55 -0400 (Tue, 29 May 2007)
New Revision: 11584
Added:
trunk/Hibernate3/code/cache-oscache/
Removed:
trunk/Hibernate3/cache-oscache/
Log:
split doc/code
Copied: trunk/Hibernate3/code/cache-oscache (from rev 11583, trunk/Hibernate3/cache-oscache)
Hibernate SVN: r11583 - in trunk/Hibernate3: code and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:10:43 -0400 (Tue, 29 May 2007)
New Revision: 11583
Added:
trunk/Hibernate3/code/cache-jbosscache2/
Removed:
trunk/Hibernate3/cache-jbosscache2/
Log:
split doc/code
Copied: trunk/Hibernate3/code/cache-jbosscache2 (from rev 11582, trunk/Hibernate3/cache-jbosscache2)
Hibernate SVN: r11582 - in trunk/Hibernate3: code and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:10:31 -0400 (Tue, 29 May 2007)
New Revision: 11582
Added:
trunk/Hibernate3/code/cache-jbosscache/
Removed:
trunk/Hibernate3/cache-jbosscache/
Log:
split doc/code
Copied: trunk/Hibernate3/code/cache-jbosscache (from rev 11581, trunk/Hibernate3/cache-jbosscache)
Hibernate SVN: r11581 - in trunk/Hibernate3: code and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:10:19 -0400 (Tue, 29 May 2007)
New Revision: 11581
Added:
trunk/Hibernate3/code/cache-ehcache/
Removed:
trunk/Hibernate3/cache-ehcache/
Log:
split doc/code
Copied: trunk/Hibernate3/code/cache-ehcache (from rev 11580, trunk/Hibernate3/cache-ehcache)
Hibernate SVN: r11580 - trunk/Hibernate3.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole@jboss.com
Date: 2007-05-29 10:06:21 -0400 (Tue, 29 May 2007)
New Revision: 11580
Added:
trunk/Hibernate3/code/
Log:
split doc/code
Hibernate SVN: r11579 - trunk/HibernateExt/search/doc/reference/en/modules.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-05-28 20:22:30 -0400 (Mon, 28 May 2007)
New Revision: 11579
Modified:
trunk/HibernateExt/search/doc/reference/en/modules/configuration.xml
Log:
HSEARCH-39 fix event description examples
Modified: trunk/HibernateExt/search/doc/reference/en/modules/configuration.xml
===================================================================
--- trunk/HibernateExt/search/doc/reference/en/modules/configuration.xml 2007-05-28 02:41:57 UTC (rev 11578)
+++ trunk/HibernateExt/search/doc/reference/en/modules/configuration.xml 2007-05-29 00:22:30 UTC (rev 11579)
@@ -351,7 +351,7 @@
</section>
</section>
- <section id="search-configuration-event" revision="1">
+ <section id="search-configuration-event" revision="2">
<title>Enabling automatic indexing</title>
<para>Automatic indexing is enable out of the box when using Hibernate
@@ -367,7 +367,7 @@
configuration is not useful with Hibernate Annotations or Hibernate
EntityManager.</para>
- <programlisting><hibernate-configuration>
+ <programlisting><session-factory>
...
<event type="post-update"
<listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
@@ -378,7 +378,7 @@
<event type="post-delete"
<listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
</event>
-</hibernate-configuration></programlisting>
+</session-factory></programlisting>
<para>Be sure to add the appropriate jar files in your classpath. Check
<literal>lib/README.TXT</literal> for the list of third party libraries. A
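
Note: the same listeners described in this documentation snippet can also be registered programmatically on a Configuration, which is the approach ReaderPerfTestCase uses later in this digest. The following is a minimal sketch under that assumption (Hibernate Core 3.2-era event listener API; the class name is invented for illustration and is not part of the commit):

import org.hibernate.cfg.Configuration;
import org.hibernate.event.PostDeleteEventListener;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.search.event.FullTextIndexEventListener;

public class ProgrammaticListenerSetup {
    // Register one FullTextIndexEventListener for the post-insert, post-update
    // and post-delete events, mirroring the XML entries shown above.
    public static Configuration enableIndexing(Configuration cfg) {
        FullTextIndexEventListener listener = new FullTextIndexEventListener();
        cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[] { listener } );
        cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[] { listener } );
        cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[] { listener } );
        return cfg;
    }
}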
Hibernate SVN: r11578 - trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-05-27 22:41:57 -0400 (Sun, 27 May 2007)
New Revision: 11578
Modified:
trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java
Log:
minor
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java 2007-05-28 02:38:03 UTC (rev 11577)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java 2007-05-28 02:41:57 UTC (rev 11578)
@@ -5,6 +5,7 @@
import org.hibernate.search.test.SearchTestCase;
import org.hibernate.search.SearchException;
+import org.hibernate.search.store.RAMDirectoryProvider;
import org.hibernate.Session;
import org.hibernate.dialect.Dialect;
import org.hibernate.cfg.AnnotationConfiguration;
@@ -23,6 +24,7 @@
for (int i = 0; i < getMappings().length; i++) {
cfg.addAnnotatedClass( getMappings()[i] );
}
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
try {
cfg.buildSessionFactory( /*new TestInterceptor()*/ );
fail("Undefined bridge went through");
Hibernate SVN: r11577 - in trunk/HibernateExt/search/src: java/org/hibernate/search/query and 2 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-05-27 22:38:03 -0400 (Sun, 27 May 2007)
New Revision: 11577
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
Log:
HSEARCH-63 rename query.getResultSize() method
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-05-28 02:31:11 UTC (rev 11576)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-05-28 02:38:03 UTC (rev 11577)
@@ -35,7 +35,7 @@
* <code>list().size()</code> because list() if the index is
* not in sync with the database at the time of query.
*/
- int resultSize();
+ int getResultSize();
/**
* Defines the Database Query used to load the Lucene results.
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-05-28 02:31:11 UTC (rev 11576)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-05-28 02:38:03 UTC (rev 11577)
@@ -338,7 +338,7 @@
}
- public int resultSize() {
+ public int getResultSize() {
if (resultSize == null) {
//get result size without object initialization
SearchFactory searchFactory = ContextHelper.getSearchFactoryBySFI( session );
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2007-05-28 02:31:11 UTC (rev 11576)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2007-05-28 02:38:03 UTC (rev 11577)
@@ -100,7 +100,7 @@
boolean enabled = stats.isStatisticsEnabled();
if (!enabled) stats.setStatisticsEnabled( true );
FullTextQuery hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
- assertEquals( "Exection of resultSize without actual results", 2, hibQuery.resultSize() );
+ assertEquals( "Exection of getResultSize without actual results", 2, hibQuery.getResultSize() );
assertEquals( "No entity should be loaded", 0, stats.getEntityLoadCount() );
query = parser.parse( "summary:Festina Or brand:Seiko" );
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2007-05-28 02:31:11 UTC (rev 11576)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2007-05-28 02:38:03 UTC (rev 11577)
@@ -8,15 +8,11 @@
import java.util.List;
import org.hibernate.search.test.SearchTestCase;
-import org.hibernate.search.test.worker.Employee;
-import org.hibernate.search.test.worker.Employer;
-import org.hibernate.search.FullTextSession;
import org.hibernate.search.Environment;
import org.hibernate.search.Search;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.event.FullTextIndexEventListener;
import org.hibernate.search.store.FSDirectoryProvider;
-import org.hibernate.search.impl.FullTextSessionImpl;
import org.hibernate.SessionFactory;
import org.hibernate.Session;
import org.hibernate.Transaction;
@@ -138,11 +134,11 @@
new String[] {"name", "physicalDescription", "suspectCharge"},
new StandardAnalyzer() );
FullTextQuery query = getQuery( "John Doe", parser, s );
- assertTrue( query.resultSize() != 0 );
+ assertTrue( query.getResultSize() != 0 );
query = getQuery( "green", parser, s );
- random.nextInt( query.resultSize() - 15 );
- query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
query.list();
tx.commit();
@@ -152,10 +148,10 @@
tx = s.beginTransaction();
query = getQuery( "John Doe", parser, s );
- assertTrue( query.resultSize() != 0 );
+ assertTrue( query.getResultSize() != 0 );
query = getQuery( "thief", parser, s );
- int firstResult = random.nextInt( query.resultSize() - 15 );
+ int firstResult = random.nextInt( query.getResultSize() - 15 );
query.setFirstResult( firstResult );
query.setMaxResults( 10 );
List result = query.list();
@@ -200,11 +196,11 @@
new String[] {"name", "physicalDescription", "suspectCharge"},
new StandardAnalyzer() );
FullTextQuery query = getQuery( "John Doe", parser, s );
- assertTrue( query.resultSize() != 0 );
+ assertTrue( query.getResultSize() != 0 );
query = getQuery( "london", parser, s );
- random.nextInt( query.resultSize() - 15 );
- query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
query.list();
tx.commit();
@@ -214,11 +210,11 @@
tx = s.beginTransaction();
getQuery( "John Doe", parser, s );
- assertTrue( query.resultSize() != 0 );
+ assertTrue( query.getResultSize() != 0 );
query = getQuery( "green", parser, s );
- random.nextInt( query.resultSize() - 15 );
- query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
query.list();
tx.commit();
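
For callers of the API the rename is mechanical. A minimal usage sketch, assuming an existing FullTextSession and an already parsed Lucene Query (the helper class and method names are invented for illustration):

import java.util.Collections;
import java.util.List;

import org.apache.lucene.search.Query;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;

public class ResultSizeUsage {
    // Count matches without loading any entities, then fetch the first page.
    public static List firstPage(FullTextSession session, Query luceneQuery) {
        FullTextQuery hibQuery = session.createFullTextQuery( luceneQuery );
        int total = hibQuery.getResultSize();   // index-only count; was hibQuery.resultSize() before r11577
        if ( total == 0 ) return Collections.EMPTY_LIST;
        hibQuery.setFirstResult( 0 );
        hibQuery.setMaxResults( 10 );
        return hibQuery.list();                 // loads at most the 10 requested entities
    }
}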
Hibernate SVN: r11576 - in trunk/HibernateExt/search/src: java/org/hibernate/search/backend and 4 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-05-27 22:31:11 -0400 (Sun, 27 May 2007)
New Revision: 11576
Added:
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/NotSharedReaderProvider.java
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProvider.java
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderFactory.java
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderHelper.java
trunk/HibernateExt/search/src/java/org/hibernate/search/reader/SharedReaderProvider.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Detective.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/SharedReaderPerfTest.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Suspect.java
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java
trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkerFactory.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
Log:
HSEARCH-61 HSEARCH-62 HSEARCH-13
Support ReaderProvider custom strategy
Use MultiReader rather than MultiSearcher
Provide a shared strategy where individual index readers are cached per searchFactory. Only the valid (current) ones are returned
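For context, the strategy is selected through the new hibernate.search.reader.strategy property defined in the Environment change below and resolved in ReaderProviderFactory to "not-shared" (the default), "shared", or a fully qualified ReaderProvider class name. A minimal configuration sketch under those assumptions (the helper class name is invented for illustration):

import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.search.Environment;

public class ReaderStrategySetup {
    // "shared" caches one IndexReader per DirectoryProvider and reuses it while it is
    // still current; "not-shared" opens a fresh reader for every query.
    public static AnnotationConfiguration useSharedReaders(AnnotationConfiguration cfg) {
        cfg.setProperty( Environment.READER_STRATEGY, "shared" );
        return cfg;
    }
}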
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java 2007-05-26 07:34:56 UTC (rev 11575)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -23,6 +23,7 @@
public static final String WORKER_SCOPE = WORKER_PREFIX + "scope";
public static final String WORKER_BACKEND = WORKER_PREFIX + "backend";
public static final String WORKER_EXECUTION = WORKER_PREFIX + "execution";
+
/**
* only used then execution is async
* Thread pool size
@@ -35,4 +36,13 @@
* default infinite
*/
public static final String WORKER_WORKQUEUE_SIZE = Environment.WORKER_PREFIX + "buffer_queue.max";
+
+ /**
+ * define the reader prefix
+ */
+ public static final String READER_PREFIX = "hibernate.search.reader.";
+ /**
+ * define the reader strategy used
+ */
+ public static final String READER_STRATEGY = READER_PREFIX + "strategy";
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java 2007-05-26 07:34:56 UTC (rev 11575)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/SearchFactory.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -16,10 +16,12 @@
import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
import org.hibernate.search.backend.Worker;
import org.hibernate.search.backend.WorkerFactory;
-import org.hibernate.search.backend.BackendQueueProcessorFactory;
import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.reader.ReaderProviderFactory;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.DirectoryProviderFactory;
import org.hibernate.util.ReflectHelper;
@@ -38,9 +40,9 @@
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
new HashMap<DirectoryProvider, ReentrantLock>();
private Worker worker;
+ private ReaderProvider readerProvider;
private BackendQueueProcessorFactory backendQueueProcessorFactory;
-
public BackendQueueProcessorFactory getBackendQueueProcessorFactory() {
return backendQueueProcessorFactory;
}
@@ -107,10 +109,8 @@
for ( DocumentBuilder builder : documentBuilders.values() ) {
builder.postInitialize( indexedClasses );
}
- WorkerFactory workerFactory = new WorkerFactory();
- workerFactory.configure( cfg, this );
- worker = workerFactory.createWorker();
-
+ worker = WorkerFactory.createWorker(cfg, this);
+ readerProvider = ReaderProviderFactory.createReaderProvider(cfg, this);
}
//code doesn't have to be multithreaded because SF creation is not.
@@ -143,6 +143,10 @@
return worker;
}
+ public ReaderProvider getReaderProvider() {
+ return readerProvider;
+ }
+
//not happy about having it as a helper class but I don't want cfg to be associated with the SearchFactory
public static ReflectionManager getReflectionManager(Configuration cfg) {
ReflectionManager reflectionManager;
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-05-26 07:34:56 UTC (rev 11575)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkerFactory.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -3,33 +3,20 @@
import java.util.Map;
import java.util.Properties;
-import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.cfg.Configuration;
-import org.hibernate.search.backend.impl.TransactionalWorker;
-import org.hibernate.search.engine.DocumentBuilder;
-import org.hibernate.search.SearchFactory;
-import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
-import org.hibernate.util.StringHelper;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.util.ReflectHelper;
+import org.hibernate.util.StringHelper;
/**
* @author Emmanuel Bernard
*/
-public class WorkerFactory {
- private Map<Class, DocumentBuilder<Object>> documentBuilders;
- private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders;
- private Configuration cfg;
- private SearchFactory searchFactory;
+public abstract class WorkerFactory {
- public void configure(Configuration cfg,
- SearchFactory searchFactory) {
- this.searchFactory = searchFactory;
- this.cfg = cfg;
- }
-
private static Properties getProperties(Configuration cfg) {
Properties props = cfg.getProperties();
Properties workerProperties = new Properties();
@@ -43,7 +30,7 @@
return workerProperties;
}
- public Worker createWorker() {
+ public static Worker createWorker(Configuration cfg, SearchFactory searchFactory) {
Properties props = getProperties( cfg );
String impl = props.getProperty( Environment.WORKER_SCOPE );
Worker worker;
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-05-26 07:34:56 UTC (rev 11575)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -18,34 +18,32 @@
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.Directory;
+import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
-import org.hibernate.Criteria;
import org.hibernate.annotations.common.util.ReflectHelper;
-import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.query.ParameterMetadata;
import org.hibernate.impl.AbstractQueryImpl;
import org.hibernate.impl.CriteriaImpl;
-import org.hibernate.search.SearchFactory;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.DocumentExtractor;
+import org.hibernate.search.engine.EntityInfo;
import org.hibernate.search.engine.Loader;
import org.hibernate.search.engine.ObjectLoader;
-import org.hibernate.search.engine.QueryLoader;
-import org.hibernate.search.engine.EntityInfo;
import org.hibernate.search.engine.ProjectionLoader;
+import org.hibernate.search.engine.QueryLoader;
+import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.util.ContextHelper;
/**
@@ -96,7 +94,7 @@
SearchFactory searchFactory = ContextHelper.getSearchFactoryBySFI( session );
//find the directories
- Searcher searcher = buildSearcher( searchFactory );
+ IndexSearcher searcher = buildSearcher( searchFactory );
if ( searcher == null ) {
return new IteratorImpl( new ArrayList<EntityInfo>(0), noLoader );
}
@@ -121,9 +119,9 @@
}
finally {
try {
- searcher.close();
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (IOException e) {
+ catch( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -176,7 +174,7 @@
SearchFactory searchFactory = ContextHelper.getSearchFactoryBySFI( session );
//find the directories
- Searcher searcher = buildSearcher( searchFactory );
+ IndexSearcher searcher = buildSearcher( searchFactory );
//FIXME: handle null searcher
Hits hits;
try {
@@ -185,14 +183,14 @@
int max = max( first, hits );
DocumentExtractor extractor = new DocumentExtractor( searchFactory, projection );
Loader loader = getLoader( (Session) this.session, searchFactory );
- return new ScrollableResultsImpl( searcher, hits, first, max, extractor, loader);
+ return new ScrollableResultsImpl( searcher, hits, first, max, extractor, loader, searchFactory);
}
catch (IOException e) {
//close only in case of exception
try {
- if ( searcher != null ) searcher.close();
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (IOException ee) {
+ catch( SearchException ee ) {
//we have the initial issue already
}
throw new HibernateException( "Unable to query Lucene index", e );
@@ -207,7 +205,7 @@
public List list() throws HibernateException {
SearchFactory searchFactory = ContextHelper.getSearchFactoryBySFI( session );
//find the directories
- Searcher searcher = buildSearcher( searchFactory );
+ IndexSearcher searcher = buildSearcher( searchFactory );
if (searcher == null) return new ArrayList(0);
Hits hits;
try {
@@ -229,9 +227,9 @@
}
finally {
try {
- searcher.close();
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (IOException e) {
+ catch (SearchException e) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -293,14 +291,21 @@
0;
}
- //TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
- private Searcher buildSearcher(SearchFactory searchFactory) {
+
+ /**
+ * can return null
+ * TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
+ */
+ private IndexSearcher buildSearcher(SearchFactory searchFactory) {
Map<Class, DocumentBuilder<Object>> builders = searchFactory.getDocumentBuilders();
- Set<Directory> directories = new HashSet<Directory>();
+ List<DirectoryProvider> directories = new ArrayList<DirectoryProvider>();
if ( classes == null || classes.length == 0 ) {
//no class means all classes
for ( DocumentBuilder builder : builders.values() ) {
- directories.add( builder.getDirectoryProvider().getDirectory() );
+ final DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+ if ( ! directories.contains( directoryProvider ) ) {
+ directories.add( directoryProvider );
+ }
}
classesAndSubclasses = null;
}
@@ -315,40 +320,17 @@
DocumentBuilder builder = builders.get( clazz );
//TODO should we rather choose a polymorphic path and allow non mapped entities
if ( builder == null ) throw new HibernateException( "Not a mapped entity: " + clazz );
- directories.add( builder.getDirectoryProvider().getDirectory() );
+ final DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+ if ( ! directories.contains( directoryProvider ) ) {
+ directories.add( directoryProvider );
+ }
}
classesAndSubclasses = involvedClasses;
}
//set up the searcher
- Searcher searcher;
- int dirNbr = directories.size();
- if ( dirNbr > 1 ) {
- try {
- //TODO IndexSearcher on top of MultiReader
- IndexSearcher[] searchers = new IndexSearcher[dirNbr];
- Iterator<Directory> it = directories.iterator();
- for ( int index = 0; index < dirNbr; index++ ) {
- searchers[index] = new IndexSearcher( it.next() );
- }
- searcher = new MultiSearcher( searchers );
- }
- catch (IOException e) {
- throw new HibernateException( "Unable to read Lucene directory", e );
- }
- }
- else if ( dirNbr == 1 ) {
- try {
- searcher = new IndexSearcher( directories.iterator().next() );
- }
- catch (IOException e) {
- throw new HibernateException( "Unable to read Lucene directory", e );
- }
- }
- else {
- return null; //no indexed entity set up
- }
- return searcher;
+ final DirectoryProvider[] directoryProviders = directories.toArray( new DirectoryProvider[directories.size()] );
+ return new IndexSearcher( searchFactory.getReaderProvider().openReader( directoryProviders ) );
}
private void setResultSize(Hits hits) {
@@ -360,7 +342,7 @@
if (resultSize == null) {
//get result size without object initialization
SearchFactory searchFactory = ContextHelper.getSearchFactoryBySFI( session );
- Searcher searcher = buildSearcher( searchFactory );
+ IndexSearcher searcher = buildSearcher( searchFactory );
if (searcher == null) {
resultSize = 0;
}
@@ -368,6 +350,7 @@
Hits hits;
try {
hits = getHits( searcher );
+ resultSize = hits.length();
}
catch (IOException e) {
throw new HibernateException( "Unable to query Lucene index", e );
@@ -375,9 +358,9 @@
finally {
//searcher cannot be null
try {
- searcher.close();
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (IOException e) {
+ catch( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-05-26 07:34:56 UTC (rev 11575)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -8,19 +8,23 @@
import java.sql.Clob;
import java.util.Calendar;
import java.util.Date;
+import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
-import java.util.HashMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.lucene.document.Document;
import org.apache.lucene.search.Hits;
-import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.IndexSearcher;
import org.hibernate.HibernateException;
import org.hibernate.ScrollableResults;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
import org.hibernate.search.engine.DocumentExtractor;
+import org.hibernate.search.engine.EntityInfo;
import org.hibernate.search.engine.Loader;
-import org.hibernate.search.engine.EntityInfo;
import org.hibernate.type.Type;
/**
@@ -31,7 +35,9 @@
* @author Emmanuel Bernard
*/
public class ScrollableResultsImpl implements ScrollableResults {
- private final Searcher searcher;
+ private static Log log = LogFactory.getLog( ScrollableResultsImpl.class );
+ private final IndexSearcher searcher;
+ private final SearchFactory searchFactory;
private final Hits hits;
private final int first;
private final int max;
@@ -42,9 +48,11 @@
private Map<EntityInfo, Object[]> resultContext;
public ScrollableResultsImpl(
- Searcher searcher, Hits hits, int first, int max, DocumentExtractor extractor, Loader loader
+ IndexSearcher searcher, Hits hits, int first, int max, DocumentExtractor extractor,
+ Loader loader, SearchFactory searchFactory
) {
this.searcher = searcher;
+ this.searchFactory = searchFactory;
this.hits = hits;
this.first = first;
this.max = max;
@@ -52,7 +60,7 @@
this.loader = loader;
this.documentExtractor = extractor;
this.entityInfos = new EntityInfo[max - first + 1];
- this.resultContext = new HashMap<EntityInfo, Object[]>(max - first + 1);
+ this.resultContext = new HashMap<EntityInfo, Object[]>( max - first + 1 );
}
public boolean next() throws HibernateException {
@@ -96,10 +104,10 @@
public void close() throws HibernateException {
try {
- searcher.close();
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (IOException e) {
- throw new HibernateException( "Unable to close Lucene searcher", e );
+ catch (SearchException e) {
+ log.warn( "Unable to properly close searcher in ScrollableResults", e );
}
}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/reader/NotSharedReaderProvider.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/reader/NotSharedReaderProvider.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/reader/NotSharedReaderProvider.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,53 @@
+//$Id: $
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
+import static org.hibernate.search.reader.ReaderProviderHelper.buildMultiReader;
+import static org.hibernate.search.reader.ReaderProviderHelper.clean;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Open a reader each time
+ *
+ * @author Emmanuel Bernard
+ */
+public class NotSharedReaderProvider implements ReaderProvider {
+ public IndexReader openReader(DirectoryProvider[] directoryProviders) {
+ final int length = directoryProviders.length;
+ IndexReader[] readers = new IndexReader[length];
+ try {
+ for (int index = 0; index < length; index++) {
+ try {
+ readers[index] = IndexReader.open( directoryProviders[index].getDirectory() );
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ catch (IOException e) {
+ //TODO more contextual info
+ clean( new SearchException( "Unable to open one of the Lucene indexes", e ), readers );
+ }
+ return buildMultiReader( length, readers );
+ }
+
+
+ public void closeReader(IndexReader reader) {
+ try {
+ reader.close();
+ }
+ catch (IOException e) {
+ //TODO extract subReaders and close each one individually
+ clean( new SearchException( "Unable to close multiReader" ), reader );
+ }
+ }
+
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ }
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProvider.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProvider.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProvider.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,32 @@
+//$Id: $
+package org.hibernate.search.reader;
+
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Responsible for providing and managing the lifecycle of a read only reader
+ * <p/>
+ * Not that the reader must be closed once opened.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ReaderProvider {
+ /**
+ * Open a reader on all the listed directory providers
+ * the opened reader has to be closed through #closeReader()
+ * The opening can be virtual
+ */
+ IndexReader openReader(DirectoryProvider[] directoryProviders);
+
+ /**
+ * close a reader previously opened by #openReader
+ * The closing can be virtual
+ */
+ void closeReader(IndexReader reader);
+
+ void initialize(Properties props, SearchFactory searchFactory);
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderFactory.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderFactory.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,63 @@
+//$Id: $
+package org.hibernate.search.reader;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ReaderProviderFactory {
+
+ private static Properties getProperties(Configuration cfg) {
+ Properties props = cfg.getProperties();
+ Properties workerProperties = new Properties();
+ for (Map.Entry entry : props.entrySet()) {
+ String key = (String) entry.getKey();
+ if ( key.startsWith( Environment.READER_PREFIX ) ) {
+ workerProperties.setProperty( key, (String) entry.getValue() );
+ }
+ }
+ return workerProperties;
+ }
+
+ public static ReaderProvider createReaderProvider(Configuration cfg, SearchFactory searchFactory) {
+ Properties props = getProperties( cfg );
+ String impl = props.getProperty( Environment.READER_STRATEGY );
+ ReaderProvider readerProvider;
+ if ( StringHelper.isEmpty( impl ) ) {
+ //put another one
+ readerProvider = new NotSharedReaderProvider();
+ }
+ else if ( "not-shared".equalsIgnoreCase( impl ) ) {
+ readerProvider = new NotSharedReaderProvider();
+ }
+ else if ( "shared".equalsIgnoreCase( impl ) ) {
+ readerProvider = new SharedReaderProvider();
+ }
+ else {
+ try {
+ Class readerProviderClass = ReflectHelper.classForName( impl, ReaderProviderFactory.class );
+ readerProvider = (ReaderProvider) readerProviderClass.newInstance();
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find readerProvider class: " + impl, e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instanciate readerProvider class: " + impl, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instanciate readerProvider class: " + impl, e );
+ }
+ }
+ readerProvider.initialize( props, searchFactory );
+ return readerProvider;
+ }
+}
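
Per the factory above, any strategy value other than "shared" or "not-shared" is treated as a class name and instantiated reflectively, so a custom strategy only needs a public no-arg constructor and the ReaderProvider contract. A hypothetical skeleton (class name and delegation choice invented for illustration, not part of the commit) that simply reuses the not-shared behaviour:

//$Id: $
package org.hibernate.search.reader;

import java.util.Properties;

import org.apache.lucene.index.IndexReader;
import org.hibernate.search.SearchFactory;
import org.hibernate.search.store.DirectoryProvider;

/**
 * Hypothetical custom strategy delegating to NotSharedReaderProvider.
 * Selected with hibernate.search.reader.strategy=org.hibernate.search.reader.DelegatingReaderProvider
 */
public class DelegatingReaderProvider implements ReaderProvider {
    private final ReaderProvider delegate = new NotSharedReaderProvider();

    public IndexReader openReader(DirectoryProvider[] directoryProviders) {
        // open a reader spanning all requested directory providers
        return delegate.openReader( directoryProviders );
    }

    public void closeReader(IndexReader reader) {
        // release a reader previously obtained from openReader()
        delegate.closeReader( reader );
    }

    public void initialize(Properties props, SearchFactory searchFactory) {
        delegate.initialize( props, searchFactory );
    }
}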
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderHelper.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderHelper.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/reader/ReaderProviderHelper.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,53 @@
+//$Id: $
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.hibernate.search.SearchException;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ReaderProviderHelper {
+ public static IndexReader buildMultiReader(int length, IndexReader[] readers) {
+ if ( length == 0 ) {
+ return null;
+ }
+ else if ( length == 1 ) {
+ //everything should be the same so wrap in an MultiReader
+ //return readers[0];
+ try {
+ return new MultiReader( readers );
+ }
+ catch (IOException e) {
+ clean( new SearchException( "Unable to open a MultiReader", e ), readers );
+ return null; //never happen, but please the compiler
+ }
+ }
+ else {
+ try {
+ return new MultiReader( readers );
+ }
+ catch (IOException e) {
+ clean( new SearchException( "Unable to open a MultiReader", e ), readers );
+ return null; //never happen, but please the compiler
+ }
+ }
+ }
+
+ public static void clean(SearchException e, IndexReader... readers) {
+ for (IndexReader reader : readers) {
+ if ( reader != null ) {
+ try {
+ reader.close();
+ }
+ catch (IOException ee) {
+ //swallow
+ }
+ }
+ }
+ throw e;
+ }
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/reader/SharedReaderProvider.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/reader/SharedReaderProvider.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/reader/SharedReaderProvider.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,315 @@
+//$Id: $
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
+import static org.hibernate.search.reader.ReaderProviderHelper.buildMultiReader;
+import static org.hibernate.search.reader.ReaderProviderHelper.clean;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Share readers per SearchFactory, reusing them iff they are still valid.
+ *
+ * @author Emmanuel Bernard
+ */
+public class SharedReaderProvider implements ReaderProvider {
+ private static Field subReadersField;
+ private static Log log = LogFactory.getLog( SharedReaderProvider.class );
+ /**
+ * nonfair lock. Need to be acquired on indexReader acquisition or release (semaphore)
+ */
+ private Lock semaphoreIndexReaderLock = new ReentrantLock();
+ /**
+ * non fair list of locks to block per IndexReader only
+ * Locks have to be acquired at least for indexReader retrieval and switch
+ * ie for all activeSearchIndexReaders manipulation
+ * this map is read only after initialization, no need to synchronize
+ */
+ private Map<DirectoryProvider, Lock> perDirectoryProviderManipulationLocks;
+ /**
+ * Contain the active (ie non obsolete IndexReader for a given Directory
+ * There may be no entry (warm up)
+ * <p/>
+ * protected by semaphoreIndexReaderLock
+ */
+ private Map<DirectoryProvider, IndexReader> activeSearchIndexReaders = new HashMap<DirectoryProvider, IndexReader>();
+ /**
+ * contains the semaphore and the directory provider per IndexReader opened
+ * all read / update have to be protected by semaphoreIndexReaderLock
+ */
+ private Map<IndexReader, ReaderData> searchIndexReaderSemaphores = new HashMap<IndexReader, ReaderData>();
+
+ public IndexReader openReader(DirectoryProvider[] directoryProviders) {
+ boolean trace = log.isTraceEnabled();
+ int length = directoryProviders.length;
+ IndexReader[] readers = new IndexReader[length];
+ if ( trace ) log.trace( "Opening IndexReader for directoryProviders: " + length );
+
+ for (int index = 0; index < length; index++) {
+ DirectoryProvider directoryProvider = directoryProviders[index];
+ IndexReader reader;
+ Lock directoryProviderLock = perDirectoryProviderManipulationLocks.get( directoryProvider );
+ if ( trace ) log.trace( "Opening IndexReader from " + directoryProvider.getDirectory().toString() );
+ directoryProviderLock.lock(); //needed for same problem as the double-checked locking
+ try {
+ reader = activeSearchIndexReaders.get( directoryProvider );
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ if ( reader == null ) {
+ if ( trace )
+ log.trace( "No shared IndexReader, opening a new one: " + directoryProvider.getDirectory().toString() );
+ reader = replaceActiveReader( null, directoryProviderLock, directoryProvider, readers );
+ }
+ else {
+ boolean isCurrent;
+ try {
+ isCurrent = reader.isCurrent();
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to read current status of Lucene IndexReader", e );
+ }
+ if ( !isCurrent ) {
+ if ( trace )
+ log.trace( "Out of date shared IndexReader found, opening a new one: " + directoryProvider.getDirectory().toString() );
+ IndexReader outOfDateReader = reader;
+ reader = replaceActiveReader( outOfDateReader, directoryProviderLock, directoryProvider, readers );
+ }
+ else {
+ if ( trace )
+ log.trace( "Valid shared IndexReader: " + directoryProvider.getDirectory().toString() );
+ directoryProviderLock.lock();
+ try {
+ //read the latest active one, the current one could be out of date and closed already
+ //the latest active is guaranteed to be active because it's protected by the dp lock
+ reader = activeSearchIndexReaders.get( directoryProvider );
+ semaphoreIndexReaderLock.lock();
+ try {
+ SharedReaderProvider.ReaderData readerData = searchIndexReaderSemaphores.get( reader );
+ //TODO if readerData is null????
+ readerData.semaphore++;
+ searchIndexReaderSemaphores.put( reader, readerData ); //not necessary
+ if ( trace ) log.trace( "Semaphore increased: " + readerData.semaphore + " for " + reader );
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ }
+ }
+ readers[index] = reader;
+ }
+ return buildMultiReader( length, readers );
+ }
+
+ private IndexReader replaceActiveReader(IndexReader outOfDateReader, Lock directoryProviderLock, DirectoryProvider directoryProvider, IndexReader[] readers) {
+ boolean trace = log.isTraceEnabled();
+ IndexReader oldReader;
+ boolean closeOldReader = false;
+ boolean closeOutOfDateReader = false;
+ IndexReader reader;
+ /**
+ * Since out of lock protection, can have multiple readers created in //
+ * not worse than NotShared and limit the locking time, hence scalability
+ */
+ try {
+ reader = IndexReader.open( directoryProvider.getDirectory() );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to open Lucene IndexReader", e );
+ }
+ directoryProviderLock.lock();
+ try {
+ //since not protected by a lock, other ones can have been added
+ oldReader = activeSearchIndexReaders.put( directoryProvider, reader );
+ semaphoreIndexReaderLock.lock();
+ try {
+ searchIndexReaderSemaphores.put( reader, new ReaderData( 1, directoryProvider ) );
+ if ( trace ) log.trace( "Semaphore: 1 for " + reader );
+ if ( outOfDateReader != null ) {
+ ReaderData readerData = searchIndexReaderSemaphores.get( outOfDateReader );
+ if ( readerData == null ) {
+ closeOutOfDateReader = false; //already removed by another prevous thread
+ }
+ else if ( readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( outOfDateReader );
+ closeOutOfDateReader = true;
+ }
+ else {
+ closeOutOfDateReader = false;
+ }
+ }
+
+ if ( oldReader != null && oldReader != outOfDateReader ) {
+ ReaderData readerData = searchIndexReaderSemaphores.get( oldReader );
+ if ( readerData == null ) {
+ log.warn( "Semaphore should not be null" );
+ closeOldReader = true; //TODO should be true or false?
+ }
+ else if ( readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( oldReader );
+ closeOldReader = true;
+ }
+ else {
+ closeOldReader = false;
+ }
+ }
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ if ( closeOutOfDateReader ) {
+ if ( trace ) log.trace( "Closing out of date IndexReader " + outOfDateReader );
+ try {
+ outOfDateReader.close();
+ }
+ catch (IOException e) {
+ clean( new SearchException( "Unable to close Lucene IndexReader", e ), readers );
+ }
+ }
+ if ( closeOldReader ) {
+ if ( trace ) log.trace( "Closing old IndexReader " + oldReader );
+ try {
+ oldReader.close();
+ }
+ catch (IOException e) {
+ clean( new SearchException( "Unable to close Lucene IndexReader", e ), readers );
+ }
+ }
+ return reader;
+ }
+
+ public void closeReader(IndexReader reader) {
+ boolean trace = log.isTraceEnabled();
+ if ( reader == null ) return;
+ IndexReader[] readers;
+ if ( reader instanceof MultiReader ) {
+ try {
+ readers = (IndexReader[]) subReadersField.get( reader );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Incompatible version of Lucene: MultiReader.subReaders not accessible", e );
+ }
+ if ( trace ) log.trace( "Closing MultiReader: " + reader );
+ }
+ else {
+ throw new AssertionFailure( "Everything should be wrapped in a MultiReader" );
+ }
+
+ for (IndexReader subReader : readers) {
+ ReaderData readerData;
+ //TODO can we avoid that lock?
+ semaphoreIndexReaderLock.lock();
+ try {
+ readerData = searchIndexReaderSemaphores.get( subReader );
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+
+ if ( readerData == null ) {
+ log.error( "Trying to close a Lucene IndexReader not present: " + subReader.directory().toString() );
+ //TODO should we try to close?
+ continue;
+ }
+
+ //acquire the locks in the same order as everywhere else
+ Lock directoryProviderLock = perDirectoryProviderManipulationLocks.get( readerData.provider );
+ boolean closeReader = false;
+ directoryProviderLock.lock();
+ try {
+ boolean isActive;
+ isActive = activeSearchIndexReaders.get( readerData.provider ) == subReader;
+ if ( trace ) log.trace( "Indexreader not active: " + subReader );
+ semaphoreIndexReaderLock.lock();
+ try {
+ readerData = searchIndexReaderSemaphores.get( subReader );
+ if ( readerData == null ) {
+ log.error( "Trying to close a Lucene IndexReader not present: " + subReader.directory().toString() );
+ //TODO should we try to close?
+ continue;
+ }
+ readerData.semaphore--;
+ if ( trace ) log.trace( "Semaphore decreased to: " + readerData.semaphore + " for " + subReader );
+ if ( readerData.semaphore < 0 )
+ log.error( "Semaphore negative: " + subReader.directory().toString() );
+ if ( ( !isActive ) && readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( subReader );
+ closeReader = true;
+ }
+ else {
+ closeReader = false;
+ }
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+
+ if ( closeReader ) {
+ if ( trace ) log.trace( "Closing IndexReader: " + subReader );
+ try {
+ subReader.close();
+ }
+ catch (IOException e) {
+ log.warn( "Unable to close Lucene IndexReader", e );
+ }
+ }
+ }
+ }
+
+ public void initialize(Properties props, SearchFactory searchFactory) {
+ if ( subReadersField == null ) {
+ try {
+ subReadersField = MultiReader.class.getDeclaredField( "subReaders" );
+ if ( !subReadersField.isAccessible() ) subReadersField.setAccessible( true );
+ }
+ catch (NoSuchFieldException e) {
+ throw new SearchException( "Incompatible version of Lucene: MultiReader.subReaders not accessible", e );
+ }
+ }
+ Set<DirectoryProvider> providers = searchFactory.getLockableDirectoryProviders().keySet();
+ perDirectoryProviderManipulationLocks = new HashMap<DirectoryProvider, Lock>( providers.size() );
+ for (DirectoryProvider dp : providers) {
+ perDirectoryProviderManipulationLocks.put( dp, new ReentrantLock() );
+ }
+ perDirectoryProviderManipulationLocks = Collections.unmodifiableMap( perDirectoryProviderManipulationLocks );
+ }
+
+ private class ReaderData {
+
+ public ReaderData(int semaphore, DirectoryProvider provider) {
+ this.semaphore = semaphore;
+ this.provider = provider;
+ }
+
+ public int semaphore;
+ public DirectoryProvider provider;
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Detective.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Detective.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Detective.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,62 @@
+//$Id: $
+package org.hibernate.search.test.reader;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Detective {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.TOKENIZED)
+ private String physicalDescription;
+ @Field(index = Index.UN_TOKENIZED)
+ private String badge;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getPhysicalDescription() {
+ return physicalDescription;
+ }
+
+ public void setPhysicalDescription(String physicalDescription) {
+ this.physicalDescription = physicalDescription;
+ }
+
+ public String getBadge() {
+ return badge;
+ }
+
+ public void setBadge(String badge) {
+ this.badge = badge;
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,19 @@
+//$Id: $
+package org.hibernate.search.test.reader;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.Environment;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class NotSharedReaderPerfTest extends ReaderPerfTestCase {
+ protected void configure(Configuration cfg) {
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( "hibernate.search.default.indexBase", "./indextemp" );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( Environment.READER_STRATEGY, "not-shared" );
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,251 @@
+//$Id: $
+package org.hibernate.search.test.reader;
+
+import java.io.File;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.Random;
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.worker.Employee;
+import org.hibernate.search.test.worker.Employer;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Environment;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.SessionFactory;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.event.PostInsertEventListener;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.search.Query;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ReaderPerfTestCase extends SearchTestCase {
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for ( File file : files ) {
+ if ( file.isDirectory() ) {
+ delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Detective.class,
+ Suspect.class
+ };
+ }
+
+ private File getBaseIndexDir() {
+ File current = new File( "." );
+ File sub = new File( current, "indextemp" );
+ return sub;
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ delete( sub );
+ }
+
+ private void delete(File sub) {
+ if ( sub.isDirectory() ) {
+ for ( File file : sub.listFiles() ) {
+ delete( file );
+ }
+ sub.delete();
+ }
+ else {
+ sub.delete();
+ }
+ }
+
+ public boolean insert = true;
+
+ public void testConcurrency() throws Exception {
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+ for ( int index = 0 ; index < 5000 ; index++ ) {
+ Detective detective = new Detective();
+ detective.setName( "John Doe " + index );
+ detective.setBadge( "123455" + index );
+ detective.setPhysicalDescription( "Blond green eye etc etc");
+ s.persist( detective );
+ Suspect suspect = new Suspect();
+ suspect.setName( "Jane Doe " + index );
+ suspect.setPhysicalDescription( "brunette, short, 30-ish");
+ if ( index % 20 == 0 ) {
+ suspect.setSuspectCharge( "thief liar " );
+ }
+ else {
+ suspect.setSuspectCharge( " It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery. Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud.");
+ }
+ s.persist( suspect );
+ }
+ tx.commit();
+ s.close();
+
+ Thread.sleep( 1000 );
+
+ int nThreads = 15;
+ ExecutorService es = Executors.newFixedThreadPool( nThreads );
+ Work work = new Work( getSessions() );
+ ReverseWork reverseWork = new ReverseWork( getSessions() );
+ long start = System.currentTimeMillis();
+ int iteration = 100;
+ for ( int i = 0; i < iteration; i++ ) {
+ es.execute( work );
+ es.execute( reverseWork );
+ }
+ while ( work.count < iteration - 1 ) {
+ Thread.sleep( 20 );
+ }
+ System.out.println( iteration + " iterations in " + nThreads + " threads: " + ( System
+ .currentTimeMillis() - start ) );
+ }
+
+ protected class Work implements Runnable {
+ private Random random = new Random( );
+ private SessionFactory sf;
+ public volatile int count = 0;
+
+ public Work(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new MultiFieldQueryParser(
+ new String[] {"name", "physicalDescription", "suspectCharge"},
+ new StandardAnalyzer() );
+ FullTextQuery query = getQuery( "John Doe", parser, s );
+ assertTrue( query.resultSize() != 0 );
+
+ query = getQuery( "green", parser, s );
+ random.nextInt( query.resultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+
+ query = getQuery( "John Doe", parser, s );
+ assertTrue( query.resultSize() != 0 );
+
+ query = getQuery( "thief", parser, s );
+ int firstResult = random.nextInt( query.resultSize() - 15 );
+ query.setFirstResult( firstResult );
+ query.setMaxResults( 10 );
+ List result = query.list();
+ Object object = result.get(0);
+ if (insert && object instanceof Detective) {
+ Detective detective = (Detective) object;
+ detective.setPhysicalDescription( detective.getPhysicalDescription() + " Eye" + firstResult );
+ }
+ else if (insert && object instanceof Suspect) {
+ Suspect suspect = (Suspect) object;
+ suspect.setPhysicalDescription( suspect.getPhysicalDescription() + " Eye" + firstResult );
+ }
+ tx.commit();
+ s.close();
+ count++;
+ }
+
+ private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
+ Query luceneQuery = null;
+ try {
+ luceneQuery = parser.parse(queryString);
+ }
+ catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return Search.createFullTextSession( s ).createFullTextQuery( luceneQuery );
+ }
+ }
+
+ protected class ReverseWork implements Runnable {
+ private SessionFactory sf;
+ private Random random = new Random();
+
+ public ReverseWork(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new MultiFieldQueryParser(
+ new String[] {"name", "physicalDescription", "suspectCharge"},
+ new StandardAnalyzer() );
+ FullTextQuery query = getQuery( "John Doe", parser, s );
+ assertTrue( query.resultSize() != 0 );
+
+ query = getQuery( "london", parser, s );
+ random.nextInt( query.resultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+
+ getQuery( "John Doe", parser, s );
+ assertTrue( query.resultSize() != 0 );
+
+ query = getQuery( "green", parser, s );
+ random.nextInt( query.resultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.resultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+ }
+
+ private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
+ Query luceneQuery = null;
+ try {
+ luceneQuery = parser.parse(queryString);
+ }
+ catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return Search.createFullTextSession( s ).createFullTextQuery( luceneQuery );
+ }
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.Clock.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ FullTextIndexEventListener del = new FullTextIndexEventListener();
+ cfg.getEventListeners().setPostDeleteEventListeners( new PostDeleteEventListener[]{del} );
+ cfg.getEventListeners().setPostUpdateEventListeners( new PostUpdateEventListener[]{del} );
+ cfg.getEventListeners().setPostInsertEventListeners( new PostInsertEventListener[]{del} );
+ }
+
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/SharedReaderPerfTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/SharedReaderPerfTest.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/SharedReaderPerfTest.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,20 @@
+//$Id: $
+package org.hibernate.search.test.reader;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.Environment;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SharedReaderPerfTest extends ReaderPerfTestCase {
+ protected void configure(Configuration cfg) {
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( "hibernate.search.default.indexBase", "./indextemp" );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( Environment.READER_STRATEGY, "shared" );
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Suspect.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Suspect.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/reader/Suspect.java 2007-05-28 02:31:11 UTC (rev 11576)
@@ -0,0 +1,63 @@
+//$Id: $
+package org.hibernate.search.test.reader;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Suspect {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.TOKENIZED)
+ private String physicalDescription;
+ @Field(index = Index.TOKENIZED)
+ private String suspectCharge;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getPhysicalDescription() {
+ return physicalDescription;
+ }
+
+ public void setPhysicalDescription(String physicalDescription) {
+ this.physicalDescription = physicalDescription;
+ }
+
+ public String getSuspectCharge() {
+ return suspectCharge;
+ }
+
+ public void setSuspectCharge(String suspectCharge) {
+ this.suspectCharge = suspectCharge;
+ }
+}