[hibernate-commits] Hibernate SVN: r12898 - in trunk/HibernateExt/search/src: java/org/hibernate/search/query and 1 other directories.

hibernate-commits at lists.jboss.org
Fri Aug 3 14:17:21 EDT 2007


Author: epbernard
Date: 2007-08-03 14:17:21 -0400 (Fri, 03 Aug 2007)
New Revision: 12898

Modified:
   trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
Log:
HSEARCH-14 fetchSize for scrollable result sets (John Griffin)
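
For context, a minimal usage sketch of the new fetch-size API, mirroring the test fixture in LuceneQueryTest below (the Employee entity and the id/lastname/dept fields come from that fixture; the FetchSizeExample class, the scrollEmployees method and the Employee import path are illustrative assumptions, not part of the patch):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.test.query.Employee;

public class FetchSizeExample {
	// Scrolls over projected results; setFetchSize() controls how many
	// Lucene hits are extracted and loaded per sliding-window batch.
	public static void scrollEmployees(Session session) throws Exception {
		FullTextSession fts = Search.createFullTextSession( session );
		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
		FullTextQuery hibQuery = fts.createFullTextQuery( parser.parse( "dept:ITech" ), Employee.class );
		hibQuery.setProjection( "id", "lastname", "dept" );
		hibQuery.setFetchSize( 3 ); // load hits three at a time while scrolling
		ScrollableResults results = hibQuery.scroll();
		while ( results.next() ) {
			Object[] row = results.get(); // row[0..2] map to the projected fields
		}
		results.close();
	}
}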

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java	2007-08-03 17:42:55 UTC (rev 12897)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java	2007-08-03 18:17:21 UTC (rev 12898)
@@ -25,7 +25,7 @@
 	 * @return this for method chaining
 	 */
 	FullTextQuery setSort(Sort sort);
-	
+
 	/**
 	 * Allows to use lucene filters.
 	 * Semi-deprecated? a preferred way is to use the @FullTextFilterDef approach
@@ -37,7 +37,7 @@
 
 	/**
 	 * Returns the number of hits for this search
-	 *
+	 * <p/>
 	 * Caution:
 	 * The number of results might be slightly different from
 	 * <code>list().size()</code> because list() if the index is
@@ -48,10 +48,9 @@
 	/**
 	 * Defines the Database Query used to load the Lucene results.
 	 * Useful to load a given object graph by refining the fetch modes
-	 *
+	 * <p/>
 	 * No projection (criteria.setProjection() ) allowed, the root entity must be the only returned type
 	 * No where restriction can be defined either.
-	 *
 	 */
 	FullTextQuery setCriteriaQuery(Criteria criteria);
 
@@ -59,19 +58,17 @@
 	 * Defines the Lucene field names projected and returned in a query result
 	 * Each field is converted back to it's object representation, an Object[] being returned for each "row"
 	 * (similar to an HQL or a Criteria API projection).
-	 *
+	 * <p/>
 	 * A projectable field must be stored in the Lucene index and use a {@link org.hibernate.search.bridge.TwoWayFieldBridge}
 	 * Unless notified in their JavaDoc, all built-in bridges are two-way. All @DocumentId fields are projectable by design.
-	 *
+	 * <p/>
 	 * If the projected field is not a projectable field, null is returned in the object[]
-	 *
 	 */
 	FullTextQuery setProjection(String... fields);
 
 	/**
 	 * Will be removed in the near future
 	 * @deprecated Use #setProjection
-	 *
 	 */
 	FullTextQuery setIndexProjection(String... fields);
 
@@ -95,4 +92,9 @@
 	 */
 	FullTextQuery setMaxResults(int maxResults);
 
+	/**
+	 * Defines scrollable result fetch size as well as the JDBC fetch size
+	 */
+	FullTextQuery setFetchSize(int i);
+
 }

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-08-03 17:42:55 UTC (rev 12897)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-08-03 18:17:21 UTC (rev 12898)
@@ -73,6 +73,7 @@
 	private String[] indexProjection;
 	private SearchFactoryImplementor searchFactoryImplementor;
 	private Map<String, FullTextFilterImpl> filterDefinitions;
+	private int fetchSize = 1;
 
 	/**
 	 * classes must be immutable
@@ -202,7 +203,7 @@
 			int max = max( first, hits );
 			DocumentExtractor extractor = new DocumentExtractor( searchFactory, indexProjection );
 			Loader loader = getLoader( (Session) this.session, searchFactory );
-			return new ScrollableResultsImpl( searcher, hits, first, max, extractor, loader, searchFactory );
+			return new ScrollableResultsImpl( searcher, hits, first, max, fetchSize, extractor, loader, searchFactory );
 		}
 		catch (IOException e) {
 			//close only in case of exception
@@ -524,6 +525,15 @@
 		return this;
 	}
 
+	public FullTextQuery setFetchSize(int fetchSize) {
+		super.setFetchSize( fetchSize );
+		if ( fetchSize <= 0 ) {
+			throw new IllegalArgumentException( "'fetch size' parameter less than or equal to 0" );
+		}
+		this.fetchSize = fetchSize;
+		return this;
+	}
+
 	public int executeUpdate() throws HibernateException {
 		throw new HibernateException( "Not supported operation" );
 	}

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2007-08-03 17:42:55 UTC (rev 12897)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2007-08-03 18:17:21 UTC (rev 12898)
@@ -12,6 +12,8 @@
 import java.util.Locale;
 import java.util.Map;
 import java.util.TimeZone;
+import java.util.List;
+import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -33,6 +35,7 @@
  * exposed to returned null objects (if the index is out of date).
  *
  * @author Emmanuel Bernard
+ * @author John Griffin
  */
 public class ScrollableResultsImpl implements ScrollableResults {
 	private static Log log = LogFactory.getLog( ScrollableResultsImpl.class );
@@ -41,6 +44,7 @@
 	private final Hits hits;
 	private final int first;
 	private final int max;
+	private final int fetchSize;
 	private int current;
 	private EntityInfo[] entityInfos;
 	private Loader loader;
@@ -48,7 +52,7 @@
 	private Map<EntityInfo, Object[]> resultContext;
 
 	public ScrollableResultsImpl(
-			IndexSearcher searcher, Hits hits, int first, int max, DocumentExtractor extractor,
+			IndexSearcher searcher, Hits hits, int first, int max, int fetchSize, DocumentExtractor extractor,
 			Loader loader, SearchFactory searchFactory
 	) {
 		this.searcher = searcher;
@@ -62,8 +66,55 @@
 		int size = max - first + 1 > 0 ? max - first + 1 : 0;
 		this.entityInfos = new EntityInfo[size];
 		this.resultContext = new HashMap<EntityInfo, Object[]>( size );
+		this.fetchSize = fetchSize;
 	}
 
+	// The 'cache' is a sliding window that moves back and
+	// forth over entityInfos, loading entries in batches of
+	// fetchSize as necessary.
+	private EntityInfo loadCache(int windowStart) {
+		int windowStop;
+
+		EntityInfo info = entityInfos[windowStart - first];
+		if ( info != null ) {
+			//data has already been loaded
+			return info;
+		}
+
+		if ( windowStart + fetchSize > max) {
+			windowStop = max;
+		}
+		else {
+			windowStop = windowStart + fetchSize - 1;
+		}
+
+		List<EntityInfo> entityInfosLoaded = new ArrayList<EntityInfo>(windowStop - windowStart + 1);
+		for (int x = windowStart; x <= windowStop; x++) {
+			try {
+				if (entityInfos[x - first] == null) {
+					//FIXME should check that clazz matches classes, but this complicates the firstResult/maxResult handling a lot
+					entityInfos[x - first] = documentExtractor.extract( hits, x );
+					entityInfosLoaded.add( entityInfos[x - first] );
+				}
+			}
+			catch (IOException e) {
+				throw new HibernateException( "Unable to read Lucene hits[" + x + "]", e );
+			}
+
+		}
+		//preload efficiently first
+		loader.load( entityInfosLoaded.toArray( new EntityInfo[entityInfosLoaded.size()] ) );
+		//load one by one to inject null results if needed
+		for ( EntityInfo slidingInfo : entityInfosLoaded ) {
+			if ( !resultContext.containsKey( slidingInfo ) ) {
+				Object loaded = loader.load( slidingInfo );
+				if ( !loaded.getClass().isArray() ) loaded = new Object[] { loaded };
+				resultContext.put( slidingInfo, (Object[]) loaded );
+			}
+		}
+		return entityInfos[windowStart - first];
+	}
+
 	public boolean next() throws HibernateException {
 		return ++current <= max;
 	}
@@ -120,23 +171,8 @@
 		// do that since we have to make up for
 		// an Object[]. J.G
 		if ( current < first || current > max ) return null;
-		EntityInfo info = entityInfos[current - first];
-		if ( info == null ) {
-			try {
-				info = documentExtractor.extract( hits, current );
-			}
-			catch (IOException e) {
-				throw new HibernateException( "Unable to read Lucene hits[" + current + "]", e );
-			}
-			//FIXME should check that clazz match classes but this complicates a lot the firstResult/maxResult
-			entityInfos[current - first] = info;
-		}
-		if ( !resultContext.containsKey( info ) ) {
-			Object loaded = loader.load( info );
-			if ( !loaded.getClass().isArray() ) loaded = new Object[] { loaded };
-			resultContext.put( info, (Object[]) loaded );
-		}
-		return resultContext.get( info );
+		loadCache( current );
+		return resultContext.get( entityInfos[current - first] );
 	}
 
 	public Object get(int i) throws HibernateException {
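
As a side note, a minimal standalone sketch of the window-bounds arithmetic used by the new loadCache() method above (the WindowBoundsSketch class, the windowStop() helper and the sample values are illustrative assumptions, not part of the patch):

public class WindowBoundsSketch {
	// Mirrors loadCache(): a window starting at windowStart covers at most
	// fetchSize rows and never runs past the last reachable row (max).
	static int windowStop(int windowStart, int fetchSize, int max) {
		if ( windowStart + fetchSize > max ) {
			return max;
		}
		return windowStart + fetchSize - 1;
	}

	public static void main(String[] args) {
		// five hits (rows 0..4) scrolled with fetchSize = 3:
		System.out.println( windowStop( 0, 3, 4 ) ); // prints 2: rows 0..2 load in one batch
		System.out.println( windowStop( 3, 3, 4 ) ); // prints 4: the final batch covers rows 3..4
	}
}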

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2007-08-03 17:42:55 UTC (rev 12897)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2007-08-03 18:17:21 UTC (rev 12898)
@@ -21,6 +21,7 @@
 
 /**
  * @author Emmanuel Bernard
+ * @author John Griffin
  */
 public class LuceneQueryTest extends SearchTestCase {
 
@@ -250,6 +251,165 @@
 		s.close();
 	}
 
+	// Technically this is checked by other tests but let's do it anyway. J.G.
+	public void testDefaultFetchSize() throws Exception {
+		FullTextSession s = Search.createFullTextSession( openSession() );
+		prepEmployeeIndex( s );
+
+		s.clear();
+		Transaction tx = s.beginTransaction();
+		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+		Query query = parser.parse( "dept:ITech" );
+		org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+		hibQuery.setProjection( "id", "lastname", "dept" );
+
+		ScrollableResults projections = hibQuery.scroll();
+		projections.beforeFirst();
+		Object[] projection = projections.get();
+		assertNull( projection );
+
+		projections.next();
+		assertTrue( projections.isFirst() );
+
+		//cleanup
+		for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+		tx.commit();
+		s.close();
+	}
+
+	public void testFetchSizeLargerThanHits() throws Exception {
+		FullTextSession s = Search.createFullTextSession( openSession() );
+		prepEmployeeIndex( s );
+
+		s.clear();
+		Transaction tx = s.beginTransaction();
+		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+		Query query = parser.parse( "dept:ITech" );
+		org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+		hibQuery.setProjection( "id", "lastname", "dept" );
+		hibQuery.setFetchSize( 6 );
+
+		ScrollableResults results = hibQuery.scroll();
+		results.beforeFirst();
+		results.next();
+		Object[] result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1000, result[0] );
+
+		//cleanup
+		for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+		tx.commit();
+		s.close();
+	}
+
+	public void testFetchSizeDefaultFirstAndMax() throws Exception {
+		FullTextSession s = Search.createFullTextSession( openSession() );
+		prepEmployeeIndex( s );
+
+		s.clear();
+		Transaction tx = s.beginTransaction();
+		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+		Query query = parser.parse( "dept:ITech" );
+		org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+		hibQuery.setProjection( "id", "lastname", "dept" );
+		hibQuery.setFetchSize( 3 );
+
+		ScrollableResults results = hibQuery.scroll();
+		results.beforeFirst();
+		results.next();
+		Object[] result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1000, result[0] );
+		results.scroll( 2 );
+		result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1003, result[0] );
+		// check cache addition
+		results.next();
+		result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1004, result[0] );
+
+		results.scroll( -2 );
+		result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1002, result[0] );
+
+		//cleanup
+		for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+		tx.commit();
+		s.close();
+	}
+
+	public void testFetchSizeNonDefaultFirstAndMax() throws Exception {
+		FullTextSession s = Search.createFullTextSession( openSession() );
+		prepEmployeeIndex( s );
+
+		s.clear();
+		Transaction tx = s.beginTransaction();
+		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+		Query query = parser.parse( "dept:ITech" );
+		org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+		hibQuery.setProjection( "id", "lastname", "dept" );
+		hibQuery.setFetchSize( 3 );
+		hibQuery.setFirstResult( 1 );
+		hibQuery.setMaxResults( 3 );
+
+		ScrollableResults results = hibQuery.scroll();
+		results.beforeFirst();
+		results.next();
+		Object[] result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1002, result[0] );
+
+		results.scroll( 2 );
+		result = results.get();
+		assertEquals( "incorrect entityInfo returned", 1004, result[0] );
+
+		results.next();
+		result = results.get();
+		assertNull( result );
+
+		// Let's see if a bad reverse scroll screws things up
+		/**
+		 * These instructions uncover problems with calculations
+		 * of 'current'. It should be limited by first and max.
+		 */
+		results.scroll( -6 );
+		result = results.get();
+		assertNull( result );
+
+		//cleanup
+		for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+		tx.commit();
+		s.close();
+	}
+
+	public void testFetchSizeNonDefaultFirstAndMaxNoHits() throws Exception {
+		FullTextSession s = Search.createFullTextSession( openSession() );
+		prepEmployeeIndex( s );
+
+		s.clear();
+		Transaction tx = s.beginTransaction();
+		QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+		Query query = parser.parse( "dept:XXX" );
+		org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+		hibQuery.setProjection( "id", "lastname", "dept" );
+		hibQuery.setFetchSize( 3 );
+		hibQuery.setFirstResult( 1 );
+		hibQuery.setMaxResults( 3 );
+
+		ScrollableResults results = hibQuery.scroll();
+		results.beforeFirst();
+		Object[] result = results.get();
+		assertNull( "non-null entity infos returned", result );
+
+		//cleanup
+		for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+		tx.commit();
+		s.close();
+	}
+
+
 	public void testMultipleEntityPerIndex() throws Exception {
 		FullTextSession s = Search.createFullTextSession( openSession() );
 		Transaction tx = s.beginTransaction();



