Author: sannegrinovero
Date: 2009-02-01 08:22:19 -0500 (Sun, 01 Feb 2009)
New Revision: 15843
Added:
search/trunk/src/test/org/hibernate/search/test/query/ScrollableResultsTest.java
search/trunk/src/test/org/hibernate/search/test/util/FullTextSessionBuilder.java
Modified:
search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
search/trunk/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
Log:
HSEARCH-338, HSEARCH-339, HSEARCH-340, HSEARCH-310
Rewritten part of ScrollableResultsImpl to solve the listed issues (start at position -1,
memory leaks, performance improvements in backwards and random scrolling, guarantee
returned objects are attached to Session)
Modified: search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2009-01-31
06:36:02 UTC (rev 15842)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2009-02-01
13:22:19 UTC (rev 15843)
@@ -239,7 +239,7 @@
);
Loader loader = getLoader( ( Session ) this.session, searchFactory );
return new ScrollableResultsImpl(
- searcher, first, max, fetchSize, extractor, loader, searchFactory
+ searcher, first, max, fetchSize, extractor, loader, searchFactory, this.session
);
}
catch ( IOException e ) {
@@ -334,7 +334,7 @@
}
/**
- * Execute the lucene search and return the machting hits.
+ * Execute the lucene search and return the matching hits.
*
* @param searcher The index searcher.
* @param n Numer of documents to retrieve
Modified: search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2009-01-31
06:36:02 UTC (rev 15842)
+++ search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2009-02-01
13:22:19 UTC (rev 15843)
@@ -2,6 +2,8 @@
package org.hibernate.search.query;
import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Blob;
@@ -9,10 +11,8 @@
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
-import java.util.HashMap;
import java.util.List;
import java.util.Locale;
-import java.util.Map;
import java.util.TimeZone;
import org.apache.lucene.search.IndexSearcher;
@@ -20,6 +20,7 @@
import org.hibernate.HibernateException;
import org.hibernate.ScrollableResults;
+import org.hibernate.engine.SessionImplementor;
import org.hibernate.search.SearchException;
import org.hibernate.search.SearchFactory;
import org.hibernate.search.engine.DocumentExtractor;
@@ -50,82 +51,76 @@
*
* @author Emmanuel Bernard
* @author John Griffin
+ * @author Sanne Grinovero
*/
public class ScrollableResultsImpl implements ScrollableResults {
+
private static final Logger log = LoggerFactory.make();
+
private final SearchFactory searchFactory;
private final IndexSearcher searcher;
private final int first;
private final int max;
private final int fetchSize;
- private int current;
- private final EntityInfo[] entityInfos;
private final Loader loader;
private final DocumentExtractor documentExtractor;
- private final Map<EntityInfo, Object[]> resultContext;
+ private final SessionImplementor session;
+
+ /**
+ * Caches result rows and EntityInfo from
+ * <code>first</code> to <code>max</code>
+ */
+ private final LoadedObject[] resultsContext;
+
+ private int current;
public ScrollableResultsImpl( IndexSearcher searcher, int first, int max, int fetchSize,
DocumentExtractor extractor,
- Loader loader, SearchFactory searchFactory
+ Loader loader, SearchFactory searchFactory, SessionImplementor sessionImplementor
) {
this.searchFactory = searchFactory;
this.searcher = searcher;
this.first = first;
this.max = max;
- this.current = first;
this.loader = loader;
this.documentExtractor = extractor;
- int size = max - first + 1 > 0 ? max - first + 1 : 0;
- this.entityInfos = new EntityInfo[size];
- this.resultContext = new HashMap<EntityInfo, Object[]>( size );
this.fetchSize = fetchSize;
+ this.session = sessionImplementor;
+ int size = Math.max( max - first + 1, 0 );
+ this.resultsContext = new LoadedObject[size];
+ beforeFirst();
}
- // The 'cache' is a sliding window of size fetchSize that
- // moves back and forth over entityInfos as directed loading
- // values as necessary.
- private EntityInfo loadCache(int windowStart) {
- int windowStop;
-
- EntityInfo info = entityInfos[windowStart - first];
- if ( info != null ) {
- //data has already been loaded
- return info;
+ private LoadedObject ensureCurrentLoaded() {
+ LoadedObject currentCacheRef = resultsContext[current - first];
+ if ( currentCacheRef != null ) {
+ return currentCacheRef;
}
-
- if ( windowStart + fetchSize > max ) {
- windowStop = max;
- }
- else {
- windowStop = windowStart + fetchSize - 1;
- }
-
- List<EntityInfo> entityInfosLoaded = new ArrayList<EntityInfo>( windowStop
- windowStart + 1 );
- for (int x = windowStart; x <= windowStop; x++) {
- try {
- if ( entityInfos[x - first] == null ) {
- //FIXME should check that clazz match classes but this complicates a lot the
firstResult/maxResult
- entityInfos[x - first] = documentExtractor.extract( x );
- entityInfosLoaded.add( entityInfos[x - first] );
- }
+ // the loading window is optimized for scrolling in both directions:
+ int windowStop = Math.min( max + 1 , current + fetchSize );
+ int windowStart = Math.max( first, current - fetchSize + 1 );
+ List<EntityInfo> entityInfosToLoad = new ArrayList<EntityInfo>( fetchSize
);
+ int sizeToLoad = 0;
+ for (int x = windowStart; x < windowStop; x++) {
+ int arrayIdx = x - first;
+ LoadedObject lo = resultsContext[arrayIdx];
+ if ( lo == null ) {
+ lo = new LoadedObject();
+ // makes hard references and extract EntityInfos:
+ entityInfosToLoad.add( lo.getEntityInfo( x ) );
+ resultsContext[arrayIdx] = lo;
+ sizeToLoad++;
+ if ( sizeToLoad >= fetchSize )
+ break;
}
- catch (IOException e) {
- throw new HibernateException( "Unable to read Lucene topDocs[" + x +
"]", e );
- }
-
}
- //preload efficiently first
- loader.load( entityInfosLoaded.toArray( new EntityInfo[entityInfosLoaded.size()] ) );
- //load one by one to inject null results if needed
- for (EntityInfo slidingInfo : entityInfosLoaded) {
- if ( !resultContext.containsKey( slidingInfo ) ) {
- Object loaded = loader.load( slidingInfo );
- if ( !loaded.getClass().isArray() ) loaded = new Object[] { loaded };
- resultContext.put( slidingInfo, (Object[]) loaded );
- }
+ //preload efficiently by batches:
+ if ( sizeToLoad > 1 ) {
+ loader.load( entityInfosToLoad.toArray( new EntityInfo[sizeToLoad] ) );
+ //(no references stored at this point: they still need to be loaded one by one to
inject null results)
}
- return entityInfos[windowStart - first];
+ return resultsContext[ current - first ];
}
-
+
/**
* {@inheritDoc}
*/
@@ -193,6 +188,7 @@
public void afterLast() {
current = max + 1;
+ //TODO help gc by clearing all structures when using forwardonly scrollmode.
}
public boolean isFirst() {
@@ -220,8 +216,8 @@
// do that since we have to make up for
// an Object[]. J.G
if ( current < first || current > max ) return null;
- loadCache( current );
- return resultContext.get( entityInfos[current - first] );
+ LoadedObject cacheEntry = ensureCurrentLoaded();
+ return cacheEntry.getManagedResult( current );
}
/**
@@ -406,4 +402,70 @@
}
return current >= first && current <= max;
}
+
+ private final class LoadedObject {
+
+ private Reference<Object[]> entity; //never==null but Reference.get can return
null
+ private Reference<EntityInfo> einfo; //never==null but Reference.get can return
null
+
+ /**
+ * Gets the objects from cache if it is available and attached to session,
+ * or reload them and update the cache entry.
+ * @param x absolute position in fulltext result.
+ * @return the managed objects
+ */
+ private Object[] getManagedResult(int x) {
+ EntityInfo entityInfo = getEntityInfo( x );
+ Object[] objects = entity==null ? null : entity.get();
+ if ( objects!=null && areAllEntitiesManaged( objects, entityInfo ) ) {
+ return objects;
+ }
+ else {
+ Object loaded = loader.load( entityInfo );
+ if ( ! loaded.getClass().isArray() ) loaded = new Object[] { loaded };
+ objects = (Object[]) loaded;
+ this.entity = new SoftReference<Object[]>( objects );
+ return objects;
+ }
+ }
+
+ /**
+ * Extract an entityInfo, either from cache or from the index.
+ * @param x the position in the index.
+ * @return
+ */
+ private EntityInfo getEntityInfo(int x) {
+ EntityInfo entityInfo = einfo==null ? null : einfo.get();
+ if ( entityInfo==null ) {
+ try {
+ entityInfo = documentExtractor.extract( x );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to read Lucene topDocs[" + x +
"]", e );
+ }
+ einfo = new SoftReference<EntityInfo>( entityInfo );
+ }
+ return entityInfo;
+ }
+
+ }
+
+ private boolean areAllEntitiesManaged(Object[] objects, EntityInfo entityInfo) {
+ //check if all entities are session-managed and skip the check on projected values
+ org.hibernate.Session hibSession = (org.hibernate.Session) session;
+ if ( entityInfo.projection != null ) {
+ // using projection: test only for entities
+ for ( int idx : entityInfo.indexesOfThis ) {
+ Object o = objects[idx];
+ //TODO improve: is it useful to check for proxies and have them reassociated to
persistence context?
+ if ( ! hibSession.contains( o ) )
+ return false;
+ }
+ return true;
+ }
+ else {
+ return hibSession.contains( objects[0] );
+ }
+ }
+
}
Modified: search/trunk/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2009-01-31
06:36:02 UTC (rev 15842)
+++ search/trunk/src/test/org/hibernate/search/test/query/LuceneQueryTest.java 2009-02-01
13:22:19 UTC (rev 15843)
@@ -34,7 +34,7 @@
s.save( clock );
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
s.save( book );
- book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
s.save( book );
tx.commit();
s.clear();
@@ -89,7 +89,7 @@
s.save( clock );
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
s.save( book );
- book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
s.save( book );
tx.commit();
s.clear();
@@ -125,7 +125,7 @@
s.save( clock );
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
s.save( book );
- book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
s.save( book );
tx.commit();
s.clear();
@@ -171,7 +171,7 @@
s.save( clock );
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
s.save( book );
- book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
s.save( book );
tx.commit();//post commit events for lucene
s.clear();
@@ -217,7 +217,7 @@
s.save( clock );
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
s.save( book );
- book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père
en vélo" );
s.save( book );
tx.commit();//post commit events for lucene
s.clear();
@@ -235,7 +235,7 @@
query = parser.parse( "summary:Festina Or brand:Seiko" );
hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
result = hibQuery.scroll();
- assertEquals( 0, result.getRowNumber() );
+ assertEquals( -1, result.getRowNumber() );
result.beforeFirst();
assertEquals( true, result.next() );
assertTrue( result.isFirst() );
Added: search/trunk/src/test/org/hibernate/search/test/query/ScrollableResultsTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/query/ScrollableResultsTest.java
(rev 0)
+++
search/trunk/src/test/org/hibernate/search/test/query/ScrollableResultsTest.java 2009-02-01
13:22:19 UTC (rev 15843)
@@ -0,0 +1,218 @@
+//$Id
+package org.hibernate.search.test.query;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+import junit.framework.TestCase;
+
+/**
+ * Test for org.hibernate.search.query.ScrollableResultsImpl
+ *
+ * @see org.hibernate.search.query.ScrollableResultsImpl
+ * @author Sanne Grinovero
+ */
+public class ScrollableResultsTest extends TestCase {
+
+ private FullTextSessionBuilder builder;
+ private FullTextSession sess;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ builder = new FullTextSessionBuilder();
+ sess = builder
+ .addAnnotatedClass( AlternateBook.class )
+ .addAnnotatedClass( Employee.class )
+ .setProperty( "hibernate.default_batch_fetch_size", "10" )
+ .build();
+ Transaction tx = sess.beginTransaction();
+ //create some entities to query:
+ for ( int i = 0; i < 324; i++ ) {
+ sess.persist( new AlternateBook( i , "book about the number " + i ) );
+ }
+ for ( int i = 0; i < 133; i++ ) {
+ sess.persist( new Employee( i , "Rossi", "dept. num. " + i ) );
+ }
+ tx.commit();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ builder.close();
+ super.tearDown();
+ }
+
+ /**
+ * Test forward scrolling using pagination
+ */
+ public void testScrollingForward() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary" ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .setFirstResult( 20 )
+ .setMaxResults( 111 )
+ .scroll();
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ assertTrue( scrollableResults.last() );
+ assertEquals( 110, scrollableResults.getRowNumber() );
+ scrollableResults.beforeFirst();
+ int position = scrollableResults.getRowNumber();
+ while ( scrollableResults.next() ) {
+ position++;
+ int bookId = position + 20;
+ assertEquals( position, scrollableResults.getRowNumber() );
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertEquals( bookId, book.getId().intValue() );
+ assertEquals( "book about the number " + bookId, book.getSummary() );
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( 110, position );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+ /**
+ * Verify inverse-order scrolling.
+ * TODO to verify correct FetchSize behavior I've been debugging
+ * the behavior; we should add a mock library to automate this kind of tests.
+ */
+ public void testScrollingBackwards() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary" ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .scroll();
+ scrollableResults.beforeFirst();
+ // initial position should be -1 as in Hibernate Core
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ assertTrue( scrollableResults.last() );
+ int position = scrollableResults.getRowNumber();
+ assertEquals( 323, position );
+ while ( scrollableResults.previous() ) {
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertEquals( --position, book.getId().intValue() );
+ assertEquals( "book about the number " + position, book.getSummary() );
+ }
+ assertEquals( 0, position );
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+ /**
+ * Test that all entities returned by a ScrollableResults
+ * are always attached to Session
+ */
+ public void testResultsAreManaged() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary" ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .scroll();
+ int position = -1;
+ while ( scrollableResults.next() ) {
+ position++;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ // evict some entities:
+ if ( position % 3 == 0 ) {
+ sess.evict( book );
+ assertFalse( sess.contains( book ) );
+ }
+ }
+ //verifies it did scroll to the end:
+ assertEquals( 323, position );
+ //assert the entities are re-attached after eviction:
+ while ( scrollableResults.previous() ) {
+ position--;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( -1, position );
+ sess.clear();
+ //assert the entities are re-attached after Session.clear:
+ while ( scrollableResults.next() ) {
+ position++;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( 323, position );
+ tx.commit();
+ }
+
+ /**
+ * Verify scrolling works correctly when combined with Projection
+ * and that the projected entities are managed, even in case
+ * of evict usage for memory management.
+ */
+ public void testScrollProjectionAndManaged() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "dept", "num") );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, Employee.class )
+ .setProjection(
+ FullTextQuery.OBJECT_CLASS,
+ FullTextQuery.ID,
+ FullTextQuery.THIS,
+ "lastname",
+ FullTextQuery.THIS
+ )
+ .setFetchSize( 10 )
+ .scroll();
+ scrollableResults.last();
+ assertEquals( 132, scrollableResults.getRowNumber() );
+ scrollableResults.beforeFirst();
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ int position = scrollableResults.getRowNumber();
+ while ( scrollableResults.next() ) {
+ position++;
+ Object[] objs = scrollableResults.get();
+ assertEquals( Employee.class, objs[0] );
+ assertEquals( position, objs[1] );
+ assertTrue( objs[2] instanceof Employee );
+ sess.contains( objs[2] );
+ assertEquals( "Rossi", objs[3] );
+ assertTrue( objs[4] instanceof Employee );
+ sess.contains( objs[4] );
+ assertTrue( objs[2]==objs[4] ); //projected twice the same entity
+ // detach some objects:
+ if ( position%3 == 0 ) {
+ sess.evict( objs[2] );
+ }
+ }
+ //verify we scrolled to the end:
+ assertEquals( 132, position );
+ // and now the other way around, checking entities are attached again:
+ while ( scrollableResults.previous() ) {
+ position--;
+ Object[] objs = scrollableResults.get();
+ assertTrue( objs[2] instanceof Employee );
+ sess.contains( objs[2] );
+ assertTrue( objs[4] instanceof Employee );
+ sess.contains( objs[4] );
+ assertTrue( objs[2]==objs[4] );
+ }
+ assertEquals( -1, position );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+}
Property changes on:
search/trunk/src/test/org/hibernate/search/test/query/ScrollableResultsTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/src/test/org/hibernate/search/test/util/FullTextSessionBuilder.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/util/FullTextSessionBuilder.java
(rev 0)
+++
search/trunk/src/test/org/hibernate/search/test/util/FullTextSessionBuilder.java 2009-02-01
13:22:19 UTC (rev 15843)
@@ -0,0 +1,105 @@
+//$Id
+package org.hibernate.search.test.util;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.RAMDirectoryProvider;
+
+/**
+ * Use the builder pattern to provide a SessionFactory.
+ * This is meant to use only ram-based index and databases, for those test
+ * which need to use several differently configured SessionFactories.
+ *
+ * @author Sanne Grinovero
+ */
+public class FullTextSessionBuilder {
+
+ private AnnotationConfiguration cfg = new AnnotationConfiguration();
+ private SessionFactory sessionFactory;
+ private Session session;
+
+ public FullTextSessionBuilder() {
+ cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
+ //DB type:
+ cfg.setProperty( Environment.URL, "jdbc:hsqldb:mem:." );
+ cfg.setProperty( Environment.DRIVER,
+ org.hsqldb.jdbcDriver.class.getCanonicalName() );
+ cfg.setProperty( Environment.DIALECT,
+ org.hibernate.dialect.HSQLDialect.class.getCanonicalName() );
+ //connection:
+ cfg.setProperty( Environment.USER, "sa" );
+ cfg.setProperty( Environment.PASS, "" );
+ cfg.setProperty( Environment.ISOLATION, "2" );
+ cfg.setProperty( Environment.POOL_SIZE, "1" );
+ cfg.setProperty( Environment.ORDER_UPDATES, "true" );
+ //cache:
+ cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" );
+ cfg.setProperty( Environment.CACHE_PROVIDER,
+ org.hibernate.cache.HashtableCacheProvider.class.getCanonicalName() );
+ cfg.setProperty( Environment.USE_QUERY_CACHE, "true" );
+ //debugging/logging:
+ cfg.setProperty( Environment.SHOW_SQL, "false" );
+ cfg.setProperty( Environment.USE_SQL_COMMENTS, "true" );
+ cfg.setProperty( Environment.FORMAT_SQL, "true" );
+ cfg.setProperty( Environment.USE_STRUCTURED_CACHE, "true" );
+ cfg.setProperty( Environment.GENERATE_STATISTICS, "true" );
+ //search specific:
+ cfg.setProperty( org.hibernate.search.Environment.ANALYZER_CLASS,
+ StopAnalyzer.class.getName() );
+ cfg.setProperty( "hibernate.search.default.directory_provider",
+ RAMDirectoryProvider.class.getName() );
+ }
+
+ /**
+ * Override before building any parameter, or add new ones.
+ * @param key
+ * @param value
+ * @return the same builder (this)
+ */
+ public FullTextSessionBuilder setProperty(String key, String value) {
+ cfg.setProperty( key, value );
+ return this;
+ }
+
+ /**
+ * Adds classes to the SessionFactory being built
+ * @param annotatedClass
+ * @return the same builder (this)
+ */
+ public FullTextSessionBuilder addAnnotatedClass(Class annotatedClass) {
+ cfg.addAnnotatedClass( annotatedClass );
+ return this;
+ }
+
+ /**
+ * Creates a new FullTextSession based upon the configuration built so far.
+ * @return
+ */
+ public FullTextSession build() {
+ if ( session != null || sessionFactory != null ) {
+ throw new java.lang.IllegalStateException( "session is open already" );
+ }
+ sessionFactory = cfg.buildSessionFactory();
+ session = sessionFactory.openSession();
+ return Search.getFullTextSession( session );
+ }
+
+ /**
+ * Closes the provided FullTextSession and the SessionFactory
+ */
+ public void close() {
+ if ( session == null || sessionFactory == null ) {
+ throw new java.lang.IllegalStateException( "session not yet built" );
+ }
+ session.close();
+ session = null;
+ sessionFactory.close();
+ sessionFactory = null;
+ }
+
+}
Property changes on:
search/trunk/src/test/org/hibernate/search/test/util/FullTextSessionBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id