[hibernate-commits] Hibernate SVN: r15541 - in search/trunk: src/java/org/hibernate/search and 8 other directories.

hibernate-commits at lists.jboss.org
Mon Nov 10 15:14:05 EST 2008


Author: hardy.ferentschik
Date: 2008-11-10 15:14:05 -0500 (Mon, 10 Nov 2008)
New Revision: 15541

Added:
   search/trunk/src/java/org/hibernate/search/query/QueryHits.java
Modified:
   search/trunk/build.xml
   search/trunk/src/java/org/hibernate/search/ProjectionConstants.java
   search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
   search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
   search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java
   search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
   search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
   search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java
   search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
   search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java
   search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
   search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
Log:
HSEARCH-283
Replaced Hits with TopDocs
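
For anyone tracking the API change: org.apache.lucene.search.Hits is
deprecated as of Lucene 2.4 and resolves documents lazily through the
searcher, while TopDocs is a detached snapshot of the top n hits. A
minimal sketch of the two call styles (assuming an open IndexSearcher
"searcher" and a prepared Query "query"):

    // old, deprecated style: Hits pages documents in lazily
    Hits hits = searcher.search( query );
    Document topDoc = hits.doc( 0 );
    float normalizedScore = hits.score( 0 );

    // new style: ask for the top n hits up front, get a plain snapshot
    TopDocs topDocs = searcher.search( query, 100 );
    Document firstDoc = searcher.doc( topDocs.scoreDocs[0].doc );
    float rawScore = topDocs.scoreDocs[0].score;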

Modified: search/trunk/build.xml
===================================================================
--- search/trunk/build.xml	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/build.xml	2008-11-10 20:14:05 UTC (rev 15541)
@@ -24,7 +24,6 @@
     <property name="javac.source" value="1.5"/>
     <property name="javac.target" value="1.5"/>
     <property name="jdbc.dir" value="jdbc"/>
-    <property name="common.dir" value="${basedir}"/>
 
     <property name="ivy.dep.dir" value="${basedir}/build/lib"/>
 
@@ -37,7 +36,7 @@
     <taskdef resource="fr/jayasoft/ivy/ant/antlib.xml"
              uri="antlib:fr.jayasoft.ivy.ant" classpathref="ivy.lib.path"/>
 
-    <import file="${common.dir}/common-build.xml"/>
+    <import file="common-build.xml"/>
 
     <property name="build.testresources.dir" value="${build.dir}/testresources"/>
     <property name="testresources.dir" value="${basedir}/src/test-resources"/>
@@ -308,7 +307,7 @@
         </copy>
 
         <copy todir="${dist.dir}" failonerror="false">
-            <fileset dir="${common.dir}">
+            <fileset dir="${basedir}">
                 <include name="common-build.xml"/>
             </fileset>
         </copy>

Modified: search/trunk/src/java/org/hibernate/search/ProjectionConstants.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/ProjectionConstants.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/ProjectionConstants.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -11,30 +11,37 @@
 	 * Represents the Hibernate Entity returned in a search.
 	 */
 	public String THIS = "__HSearch_This";
+
 	/**
 	 * The Lucene document returned by a search.
 	 */
 	public String DOCUMENT = "__HSearch_Document";
+
 	/**
 	 * The legacy document's score from a search.
 	 */
 	public String SCORE = "__HSearch_Score";
+
 	/**
 	 * The boost value of the Lucene document.
+	 *
 	 * @deprecated always return 1
 	 */
 	public String BOOST = "__HSearch_Boost";
+
 	/**
 	 * Object id property
 	 */
 	public String ID = "__HSearch_id";
+
 	/**
 	 * Lucene Document id
 	 * Experimental: If you use this feature, please speak up in the forum
-	 *  
+	 * <p/>
 	 * Expert: the Lucene document id can change over time between two different IndexReader openings.
 	 */
 	public String DOCUMENT_ID = "__HSearch_DocumentId";
+	
 	/**
 	 * Lucene {@link org.apache.lucene.search.Explanation} object describing the score computation for
 	 * the matching object/document
@@ -44,7 +51,7 @@
 	 * and using fullTextQuery.explain(int)
 	 */
 	public String EXPLANATION = "__HSearch_Explanation";
-	
+
 	/**
 	 * Object class
 	 */

Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -5,27 +5,24 @@
 import java.io.Serializable;
 
 import org.apache.lucene.document.Document;
-import org.apache.lucene.search.Hits;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.hibernate.search.engine.EntityInfo;
+
 import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.query.QueryHits;
 
 /**
  * @author Emmanuel Bernard
  * @author John Griffin
+ * @author Hardy Ferentschik
  */
 public class DocumentExtractor {
 	private final SearchFactoryImplementor searchFactoryImplementor;
 	private final String[] projection;
-	private final IndexSearcher searcher;
-	private final Query preparedQuery;
+	private final QueryHits queryHits;
 
-	public DocumentExtractor(Query preparedQuery, IndexSearcher searcher, SearchFactoryImplementor searchFactoryImplementor, String... projection) {
+	public DocumentExtractor(QueryHits queryHits, SearchFactoryImplementor searchFactoryImplementor, String... projection) {
 		this.searchFactoryImplementor = searchFactoryImplementor;
 		this.projection = projection;
-		this.searcher = searcher;
-		this.preparedQuery = preparedQuery;
+		this.queryHits = queryHits;
 	}
 
 	private EntityInfo extract(Document document) {
@@ -35,20 +32,19 @@
 		if ( projection != null && projection.length > 0 ) {
 			projected = DocumentBuilder.getDocumentFields( searchFactoryImplementor, clazz, document, projection );
 		}
-		EntityInfo entityInfo = new EntityInfo( clazz, id, projected );
-		return entityInfo;
+		return new EntityInfo( clazz, id, projected );
 	}
 
-	public EntityInfo extract(Hits hits, int index) throws IOException {
-		Document doc = hits.doc( index );
+	public EntityInfo extract(int index) throws IOException {
+		Document doc = queryHits.doc( index );
 		//TODO if we are only looking for score (unlikely), avoid accessing doc (lazy load)
 		EntityInfo entityInfo = extract( doc );
 		Object[] eip = entityInfo.projection;
 
 		if ( eip != null && eip.length > 0 ) {
-			for (int x = 0; x < projection.length; x++) {
+			for ( int x = 0; x < projection.length; x++ ) {
 				if ( ProjectionConstants.SCORE.equals( projection[x] ) ) {
-					eip[x] = hits.score( index );
+					eip[x] = queryHits.score( index );
 				}
 				else if ( ProjectionConstants.ID.equals( projection[x] ) ) {
 					eip[x] = entityInfo.id;
@@ -57,18 +53,18 @@
 					eip[x] = doc;
 				}
 				else if ( ProjectionConstants.DOCUMENT_ID.equals( projection[x] ) ) {
-					eip[x] = hits.id( index );
+					eip[x] = queryHits.docId( index );
 				}
 				else if ( ProjectionConstants.BOOST.equals( projection[x] ) ) {
 					eip[x] = doc.getBoost();
 				}
 				else if ( ProjectionConstants.EXPLANATION.equals( projection[x] ) ) {
-					eip[x] = searcher.explain( preparedQuery, hits.id( index ) );
+					eip[x] = queryHits.explain( index );
 				}
 				else if ( ProjectionConstants.THIS.equals( projection[x] ) ) {
 					//THIS could be projected more than once
 					//THIS loading delayed to the Loader phase
-					entityInfo.indexesOfThis.add(x);
+					entityInfo.indexesOfThis.add( x );
 				}
 			}
 		}
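
The net effect on callers, mirroring the new constructor and the
index-based extract (sketch):

    DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactoryImplementor, projection );
    for ( int index = first; index <= max; index++ ) {
        // document and score access is now delegated to QueryHits
        EntityInfo info = extractor.extract( index );
    }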

Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -89,7 +89,7 @@
 	/**
 	 * {@inheritDoc}
 	 */
-	public void purgeAll(Class entityType) {
+	public <T> void purgeAll(Class<T> entityType) {
 		purge( entityType, null );
 	}
 
@@ -101,7 +101,7 @@
 	/**
 	 * {@inheritDoc}
 	 */
-	public void purge(Class<?> entityType, Serializable id) {
+	public <T> void purge(Class<T> entityType, Serializable id) {
 		if ( entityType == null ) {
 			return;
 		}
@@ -115,35 +115,35 @@
 			throw new IllegalArgumentException( msg );
 		}
 
-		Work work;
+		Work<T> work;
 		if ( id == null ) {
 			// purge the main entity
-			work = new Work( entityType, id, WorkType.PURGE_ALL );
+			work = new Work<T>( entityType, id, WorkType.PURGE_ALL );
 			searchFactoryImplementor.getWorker().performWork( work, transactionContext );
 
 			// purge the subclasses
 			Set<Class<?>> subClasses = builder.getMappedSubclasses();
+			Work subClassWork;
 			for ( Class clazz : subClasses ) {
-				work = new Work( clazz, id, WorkType.PURGE_ALL );
-				searchFactoryImplementor.getWorker().performWork( work, transactionContext );
+				subClassWork = new Work( clazz, id, WorkType.PURGE_ALL );
+				searchFactoryImplementor.getWorker().performWork( subClassWork, transactionContext );
 			}
 		}
 		else {
-			work = new Work( entityType, id, WorkType.PURGE );
+			work = new Work<T>( entityType, id, WorkType.PURGE );
 			searchFactoryImplementor.getWorker().performWork( work, transactionContext );
 		}
 	}
 
 	/**
-	 * (re)index an entity.
-	 * Non indexable entities are ignored
-	 * The entity must be associated with the session
+	 * (Re-)index an entity.
+	 * The entity must be associated with the session and non indexable entities are ignored.
 	 *
 	 * @param entity The entity to index - must not be <code>null</code>.
 	 *
 	 * @throws IllegalArgumentException if entity is null or not an @Indexed entity
 	 */
-	public void index(Object entity) {
+	public <T> void index(T entity) {
 		if ( entity == null ) {
 			throw new IllegalArgumentException( "Entity to index should not be null" );
 		}
@@ -157,7 +157,7 @@
 			throw new IllegalArgumentException( msg );
 		}
 		Serializable id = session.getIdentifier( entity );
-		Work work = new Work( entity, id, WorkType.INDEX );
+		Work<T> work = new Work<T>( entity, id, WorkType.INDEX );
 		searchFactoryImplementor.getWorker().performWork( work, transactionContext );
 
 		//TODO
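
The purge/index signatures are now generic, but call sites read the
same; for example (Book and bookId are hypothetical):

    FullTextSession fts = Search.getFullTextSession( session );
    fts.index( book );               // <T> void index(T entity)
    fts.purge( Book.class, bookId ); // <T> void purge(Class<T>, Serializable)
    fts.purgeAll( Book.class );      // <T> void purgeAll(Class<T>)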

Modified: search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -1,8 +1,8 @@
 // $Id$
 package org.hibernate.search.query;
 
-import java.util.Map;
 import java.util.HashMap;
+import java.util.Map;
 
 import org.hibernate.search.FullTextFilter;
 
@@ -30,7 +30,6 @@
 		return parameters.get( name );
 	}
 
-
 	public Map<String, Object> getParameters() {
 		return parameters;
 	}

Modified: search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -18,12 +18,12 @@
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.Hits;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Searcher;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
 import org.slf4j.Logger;
 
 import org.hibernate.Criteria;
@@ -132,29 +132,29 @@
 			return new IteratorImpl( Collections.EMPTY_LIST, noLoader );
 		}
 		try {
-			QueryAndHits queryAndHits = getQueryAndHits( searcher );
+			QueryHits queryHits = getQueryHits( searcher );
 			int first = first();
-			int max = max( first, queryAndHits.hits );
-			Session sess = (Session) this.session;
+			int max = max( first, queryHits.totalHits );
+			Session sess = ( Session ) this.session;
 
 			int size = max - first + 1 < 0 ? 0 : max - first + 1;
 			List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
-			DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
-			for (int index = first; index <= max; index++) {
+			DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactoryImplementor, indexProjection );
+			for ( int index = first; index <= max; index++ ) {
 				//TODO use indexSearcher.getIndexReader().document( hits.id(index), FieldSelector(indexProjection) );
-				infos.add( extractor.extract( queryAndHits.hits, index ) );
+				infos.add( extractor.extract( index ) );
 			}
 			Loader loader = getLoader( sess, searchFactoryImplementor );
 			return new IteratorImpl( infos, loader );
 		}
-		catch (IOException e) {
+		catch ( IOException e ) {
 			throw new HibernateException( "Unable to query Lucene index", e );
 		}
 		finally {
 			try {
 				closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
 			}
-			catch (SearchException e) {
+			catch ( SearchException e ) {
 				log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
 			}
 		}
@@ -167,9 +167,11 @@
 			return loader;
 		}
 		if ( criteria != null ) {
-			if ( classes.length > 1 ) throw new SearchException( "Cannot mix criteria and multiple entity types" );
+			if ( classes.length > 1 ) {
+				throw new SearchException( "Cannot mix criteria and multiple entity types" );
+			}
 			if ( criteria instanceof CriteriaImpl ) {
-				String targetEntity = ( (CriteriaImpl) criteria ).getEntityOrClassName();
+				String targetEntity = ( ( CriteriaImpl ) criteria ).getEntityOrClassName();
 				if ( classes.length == 1 && !classes[0].getName().equals( targetEntity ) ) {
 					throw new SearchException( "Criteria query entity should match query entity" );
 				}
@@ -178,7 +180,7 @@
 						Class entityType = ReflectHelper.classForName( targetEntity );
 						classes = new Class[] { entityType };
 					}
-					catch (ClassNotFoundException e) {
+					catch ( ClassNotFoundException e ) {
 						throw new SearchException( "Unable to load entity class from criteria: " + targetEntity, e );
 					}
 				}
@@ -211,19 +213,21 @@
 		IndexSearcher searcher = buildSearcher( searchFactory );
 		//FIXME: handle null searcher
 		try {
-			QueryAndHits queryAndHits = getQueryAndHits( searcher );
+			QueryHits queryHits = getQueryHits( searcher );
 			int first = first();
-			int max = max( first, queryAndHits.hits );
-			DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactory, indexProjection );
-			Loader loader = getLoader( (Session) this.session, searchFactory );
-			return new ScrollableResultsImpl( searcher, queryAndHits.hits, first, max, fetchSize, extractor, loader, searchFactory );
+			int max = max( first, queryHits.totalHits );
+			DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactory, indexProjection );
+			Loader loader = getLoader( ( Session ) this.session, searchFactory );
+			return new ScrollableResultsImpl(
+					searcher, first, max, fetchSize, extractor, loader, searchFactory
+			);
 		}
-		catch (IOException e) {
+		catch ( IOException e ) {
 			//close only in case of exception
 			try {
 				closeSearcher( searcher, searchFactory.getReaderProvider() );
 			}
-			catch (SearchException ee) {
+			catch ( SearchException ee ) {
 				//we have the initial issue already
 			}
 			throw new HibernateException( "Unable to query Lucene index", e );
@@ -239,18 +243,20 @@
 		SearchFactoryImplementor searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
 		//find the directories
 		IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
-		if ( searcher == null ) return Collections.EMPTY_LIST;
+		if ( searcher == null ) {
+			return Collections.EMPTY_LIST;
+		}
 		try {
-			QueryAndHits queryAndHits = getQueryAndHits( searcher );
+			QueryHits queryHits = getQueryHits( searcher );
 			int first = first();
-			int max = max( first, queryAndHits.hits );
-			Session sess = (Session) this.session;
+			int max = max( first, queryHits.totalHits );
+			Session sess = ( Session ) this.session;
 
 			int size = max - first + 1 < 0 ? 0 : max - first + 1;
 			List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
-			DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
-			for (int index = first; index <= max; index++) {
-				infos.add( extractor.extract( queryAndHits.hits, index ) );
+			DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactoryImplementor, indexProjection );
+			for ( int index = first; index <= max; index++ ) {
+				infos.add( extractor.extract( index ) );
 			}
 			Loader loader = getLoader( sess, searchFactoryImplementor );
 			List list = loader.load( infos.toArray( new EntityInfo[infos.size()] ) );
@@ -262,14 +268,14 @@
 				return resultTransformer.transformList( list );
 			}
 		}
-		catch (IOException e) {
+		catch ( IOException e ) {
 			throw new HibernateException( "Unable to query Lucene index", e );
 		}
 		finally {
 			try {
 				closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
 			}
-			catch (SearchException e) {
+			catch ( SearchException e ) {
 				log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
 			}
 		}
@@ -279,16 +285,18 @@
 		Explanation explanation = null;
 		SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
 		Searcher searcher = buildSearcher( searchFactoryImplementor );
-		if (searcher == null) {
-			throw new SearchException("Unable to build explanation for document id:"
-					+ documentId + ". no index found");
+		if ( searcher == null ) {
+			throw new SearchException(
+					"Unable to build explanation for document id:"
+							+ documentId + ". no index found"
+			);
 		}
 		try {
 			org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
 			buildFilters();
 			explanation = searcher.explain( query, documentId );
 		}
-		catch (IOException e) {
+		catch ( IOException e ) {
 			throw new HibernateException( "Unable to query Lucene index and build explanation", e );
 		}
 		finally {
@@ -296,7 +304,7 @@
 			try {
 				closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
 			}
-			catch (SearchException e) {
+			catch ( SearchException e ) {
 				log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
 			}
 		}
@@ -307,16 +315,17 @@
 	 * Execute the Lucene search and return the matching hits.
 	 *
 	 * @param searcher The index searcher.
-	 * @return The lucene hits.
+	 *
+	 * @return An instance of <code>QueryHits</code> wrapping the Lucene query and the matching documents.
+	 *
 	 * @throws IOException in case there is an error executing the lucene search.
 	 */
-	private QueryAndHits getQueryAndHits(Searcher searcher) throws IOException {
-		Hits hits;
+	private QueryHits getQueryHits(Searcher searcher) throws IOException {
 		org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
 		buildFilters();
-		hits = searcher.search( query, filter, sort );
-		setResultSize( hits );
-		return new QueryAndHits( query, hits );
+		QueryHits queryHits = new QueryHits( searcher, query, filter, sort );
+		resultSize = queryHits.totalHits;
+		return queryHits;
 	}
 
 	private void buildFilters() {
@@ -325,12 +334,14 @@
 		}
 
 		ChainedFilter chainedFilter = new ChainedFilter();
-		for (FullTextFilterImpl fullTextFilter : filterDefinitions.values()) {
-			Filter filter = buildLuceneFilter(fullTextFilter);
+		for ( FullTextFilterImpl fullTextFilter : filterDefinitions.values() ) {
+			Filter filter = buildLuceneFilter( fullTextFilter );
 			chainedFilter.addFilter( filter );
 		}
 
-		if ( filter != null ) chainedFilter.addFilter( filter );
+		if ( filter != null ) {
+			chainedFilter.addFilter( filter );
+		}
 		filter = chainedFilter;
 	}
 
@@ -339,6 +350,7 @@
 	 *
 	 * @param fullTextFilter the Hibernate specific <code>FullTextFilter</code> used to create the
 	 * Lucene <code>Filter</code>.
+	 *
 	 * @return the Lucene filter mapped to the filter definition
 	 */
 	private Filter buildLuceneFilter(FullTextFilterImpl fullTextFilter) {
@@ -350,8 +362,8 @@
 		 * as FilterCachingStrategy ensure a memory barrier between concurrent thread calls
 		 */
 		FilterDef def = searchFactoryImplementor.getFilterDefinition( fullTextFilter.getName() );
-		Object instance = createFilterInstance(fullTextFilter, def);
-		FilterKey key = createFilterKey(def, instance);
+		Object instance = createFilterInstance( fullTextFilter, def );
+		FilterKey key = createFilterKey( def, instance );
 
 		// try to get the filter out of the cache
 		Filter filter = cacheInstance( def.getCacheMode() ) ?
@@ -359,7 +371,7 @@
 				null;
 
 		if ( filter == null ) {
-			filter = createFilter(def, instance);
+			filter = createFilter( def, instance );
 
 			// add filter to cache if we have to
 			if ( cacheInstance( def.getCacheMode() ) ) {
@@ -373,33 +385,41 @@
 		Filter filter = null;
 		if ( def.getFactoryMethod() != null ) {
 			try {
-				filter = (Filter) def.getFactoryMethod().invoke( instance );
+				filter = ( Filter ) def.getFactoryMethod().invoke( instance );
 			}
-			catch (IllegalAccessException e) {
-				throw new SearchException( "Unable to access @Factory method: "
-						+ def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+			catch ( IllegalAccessException e ) {
+				throw new SearchException(
+						"Unable to access @Factory method: "
+								+ def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+				);
 			}
-			catch (InvocationTargetException e) {
-				throw new SearchException( "Unable to access @Factory method: "
-						+ def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+			catch ( InvocationTargetException e ) {
+				throw new SearchException(
+						"Unable to access @Factory method: "
+								+ def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+				);
 			}
-			catch (ClassCastException e) {
-				throw new SearchException( "@Key method does not return a org.apache.lucene.search.Filter class: "
-						+ def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+			catch ( ClassCastException e ) {
+				throw new SearchException(
+						"@Key method does not return a org.apache.lucene.search.Filter class: "
+								+ def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+				);
 			}
 		}
 		else {
 			try {
-				filter = (Filter) instance;
+				filter = ( Filter ) instance;
 			}
-			catch (ClassCastException e) {
-				throw new SearchException( "Filter implementation does not implement the Filter interface: "
-						+ def.getImpl().getName() + ". "
-						+ (def.getFactoryMethod() != null ? def.getFactoryMethod().getName() : ""), e );
+			catch ( ClassCastException e ) {
+				throw new SearchException(
+						"Filter implementation does not implement the Filter interface: "
+								+ def.getImpl().getName() + ". "
+								+ ( def.getFactoryMethod() != null ? def.getFactoryMethod().getName() : "" ), e
+				);
 			}
 		}
 
-		filter = addCachingWrapperFilter(filter, def);
+		filter = addCachingWrapperFilter( filter, def );
 		return filter;
 	}
 
@@ -408,13 +428,14 @@
 	 *
 	 * @param filter the filter which maybe gets wrapped.
 	 * @param def The filter definition used to decide whether wrapping should occur or not.
+	 *
 	 * @return The original filter or wrapped filter depending on the information extracted from
-	 * <code>def</code>.
+	 *         <code>def</code>.
 	 */
 	private Filter addCachingWrapperFilter(Filter filter, FilterDef def) {
 		if ( cacheResults( def.getCacheMode() ) ) {
 			int cachingWrapperFilterSize = getSearchFactoryImplementor().getFilterCacheBitResultsSize();
-			filter = new org.hibernate.search.filter.CachingWrapperFilter(filter, cachingWrapperFilterSize);
+			filter = new org.hibernate.search.filter.CachingWrapperFilter( filter, cachingWrapperFilterSize );
 		}
 
 		return filter;
@@ -433,27 +454,35 @@
 				}
 
 				public boolean equals(Object obj) {
-					if ( !( obj instanceof FilterKey ) ) return false;
-					FilterKey that = (FilterKey) obj;
+					if ( !( obj instanceof FilterKey ) ) {
+						return false;
+					}
+					FilterKey that = ( FilterKey ) obj;
 					return this.getImpl().equals( that.getImpl() );
 				}
 			};
 		}
 		else {
 			try {
-				key = (FilterKey) def.getKeyMethod().invoke( instance );
+				key = ( FilterKey ) def.getKeyMethod().invoke( instance );
 			}
-			catch (IllegalAccessException e) {
-				throw new SearchException( "Unable to access @Key method: "
-						+ def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+			catch ( IllegalAccessException e ) {
+				throw new SearchException(
+						"Unable to access @Key method: "
+								+ def.getImpl().getName() + "." + def.getKeyMethod().getName()
+				);
 			}
-			catch (InvocationTargetException e) {
-				throw new SearchException( "Unable to access @Key method: "
-						+ def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+			catch ( InvocationTargetException e ) {
+				throw new SearchException(
+						"Unable to access @Key method: "
+								+ def.getImpl().getName() + "." + def.getKeyMethod().getName()
+				);
 			}
-			catch (ClassCastException e) {
-				throw new SearchException( "@Key method does not return FilterKey: "
-						+ def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+			catch ( ClassCastException e ) {
+				throw new SearchException(
+						"@Key method does not return FilterKey: "
+								+ def.getImpl().getName() + "." + def.getKeyMethod().getName()
+				);
 			}
 		}
 		key.setImpl( def.getImpl() );
@@ -466,28 +495,29 @@
 	}
 
 	private Object createFilterInstance(FullTextFilterImpl fullTextFilter,
-			FilterDef def) {
+										FilterDef def) {
 		Object instance;
 		try {
 			instance = def.getImpl().newInstance();
 		}
-		catch (InstantiationException e) {
+		catch ( InstantiationException e ) {
 			throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
 		}
-		catch (IllegalAccessException e) {
+		catch ( IllegalAccessException e ) {
 			throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
 		}
-		for (Map.Entry<String, Object> entry : fullTextFilter.getParameters().entrySet()) {
+		for ( Map.Entry<String, Object> entry : fullTextFilter.getParameters().entrySet() ) {
 			def.invoke( entry.getKey(), instance, entry.getValue() );
 		}
-		if ( cacheInstance( def.getCacheMode() ) && def.getKeyMethod() == null && fullTextFilter.getParameters().size() > 0 ) {
+		if ( cacheInstance( def.getCacheMode() ) && def.getKeyMethod() == null && fullTextFilter.getParameters()
+				.size() > 0 ) {
 			throw new SearchException( "Filter with parameters and no @Key method: " + fullTextFilter.getName() );
 		}
 		return instance;
 	}
 
 	private org.apache.lucene.search.Query filterQueryByClasses(org.apache.lucene.search.Query luceneQuery) {
-		if ( ! needClassFilterClause ) {
+		if ( !needClassFilterClause ) {
 			return luceneQuery;
 		}
 		else {
@@ -496,7 +526,7 @@
 			BooleanQuery classFilter = new BooleanQuery();
 			//annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
 			classFilter.setBoost( 0 );
-			for (Class clazz : classesAndSubclasses) {
+			for ( Class clazz : classesAndSubclasses ) {
 				Term t = new Term( DocumentBuilder.CLASS_FIELDNAME, clazz.getName() );
 				TermQuery termQuery = new TermQuery( t );
 				classFilter.add( termQuery, BooleanClause.Occur.SHOULD );
@@ -508,12 +538,15 @@
 		}
 	}
 
-	private int max(int first, Hits hits) {
-		return maxResults == null ?
-				hits.length() - 1 :
-				maxResults + first < hits.length() ?
-						first + maxResults - 1 :
-						hits.length() - 1;
+	private int max(int first, int totalHits) {
+		if ( maxResults == null ) {
+			return totalHits - 1;
+		}
+		else {
+			return maxResults + first < totalHits ?
+					first + maxResults - 1 :
+					totalHits - 1;
+		}
 	}
 
 	private int first() {
@@ -537,12 +570,15 @@
 			// empty classes array means search over all indexed entities,
 			// but we have to make sure there is at least one
 			if ( builders.isEmpty() ) {
-				throw new HibernateException( "There are no mapped entities (don't forget to add @Indexed to at least one class)." );
+				throw new HibernateException(
+						"There are no mapped entities (don't forget to add @Indexed to at least one class)."
+				);
 			}
 
-			for (DocumentBuilder builder : builders.values()) {
+			for ( DocumentBuilder builder : builders.values() ) {
 				searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
-				final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForAllShards();
+				final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy()
+						.getDirectoryProvidersForAllShards();
 				populateDirectories( directories, directoryProviders, searchFactoryImplementor );
 			}
 			classesAndSubclasses = null;
@@ -550,18 +586,22 @@
 		else {
 			Set<Class<?>> involvedClasses = new HashSet<Class<?>>( classes.length );
 			Collections.addAll( involvedClasses, classes );
-			for (Class<?> clazz : classes) {
+			for ( Class<?> clazz : classes ) {
 				DocumentBuilder<?> builder = builders.get( clazz );
-				if ( builder != null ) involvedClasses.addAll( builder.getMappedSubclasses() );
+				if ( builder != null ) {
+					involvedClasses.addAll( builder.getMappedSubclasses() );
+				}
 			}
 
-			for (Class clazz : involvedClasses) {
+			for ( Class clazz : involvedClasses ) {
 				DocumentBuilder builder = builders.get( clazz );
 				//TODO should we rather choose a polymorphic path and allow non mapped entities
-				if ( builder == null )
+				if ( builder == null ) {
 					throw new HibernateException( "Not a mapped entity (don't forget to add @Indexed): " + clazz );
+				}
 
-				final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForAllShards();
+				final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy()
+						.getDirectoryProvidersForAllShards();
 				searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
 				populateDirectories( directories, directoryProviders, searchFactoryImplementor );
 			}
@@ -570,33 +610,41 @@
 
 		//compute optimization needClassFilterClause
 		//if at least one DP contains one class that is not part of the targeted classesAndSubclasses we can't optimize
-		if ( classesAndSubclasses != null) {
-			for (DirectoryProvider dp : directories) {
-				final Set<Class<?>> classesInDirectoryProvider = searchFactoryImplementor.getClassesInDirectoryProvider( dp );
+		if ( classesAndSubclasses != null ) {
+			for ( DirectoryProvider dp : directories ) {
+				final Set<Class<?>> classesInDirectoryProvider = searchFactoryImplementor.getClassesInDirectoryProvider(
+						dp
+				);
 				// if a DP contains only one class, we know for sure it's part of classesAndSubclasses
 				if ( classesInDirectoryProvider.size() > 1 ) {
 					//risk of needClassFilterClause
-					for (Class clazz : classesInDirectoryProvider) {
-						if ( ! classesAndSubclasses.contains( clazz ) ) {
+					for ( Class clazz : classesInDirectoryProvider ) {
+						if ( !classesAndSubclasses.contains( clazz ) ) {
 							this.needClassFilterClause = true;
 							break;
 						}
 					}
 				}
-				if ( this.needClassFilterClause ) break;
+				if ( this.needClassFilterClause ) {
+					break;
+				}
 			}
 		}
 
 		//set up the searcher
 		final DirectoryProvider[] directoryProviders = directories.toArray( new DirectoryProvider[directories.size()] );
-		IndexSearcher is = new IndexSearcher( searchFactoryImplementor.getReaderProvider().openReader( directoryProviders ) );
+		IndexSearcher is = new IndexSearcher(
+				searchFactoryImplementor.getReaderProvider().openReader(
+						directoryProviders
+				)
+		);
 		is.setSimilarity( searcherSimilarity );
 		return is;
 	}
 
 	private void populateDirectories(List<DirectoryProvider> directories, DirectoryProvider[] directoryProviders,
 									 SearchFactoryImplementor searchFactoryImplementor) {
-		for (DirectoryProvider provider : directoryProviders) {
+		for ( DirectoryProvider provider : directoryProviders ) {
 			if ( !directories.contains( provider ) ) {
 				directories.add( provider );
 			}
@@ -608,7 +656,10 @@
 			similarity = builder.getSimilarity();
 		}
 		else if ( !similarity.getClass().equals( builder.getSimilarity().getClass() ) ) {
-			throw new HibernateException( "Cannot perform search on two entities with differing Similarity implementations (" + similarity.getClass().getName() + " & " + builder.getSimilarity().getClass().getName() + ")" );
+			throw new HibernateException(
+					"Cannot perform search on two entities with differing Similarity implementations (" + similarity.getClass()
+							.getName() + " & " + builder.getSimilarity().getClass().getName() + ")"
+			);
 		}
 
 		return similarity;
@@ -617,16 +668,11 @@
 	private void closeSearcher(Searcher searcher, ReaderProvider readerProvider) {
 		Set<IndexReader> indexReaders = getIndexReaders( searcher );
 
-		for (IndexReader indexReader : indexReaders) {
+		for ( IndexReader indexReader : indexReaders ) {
 			readerProvider.closeReader( indexReader );
 		}
 	}
 
-	private void setResultSize(Hits hits) {
-		resultSize = hits.length();
-	}
-
-
 	public int getResultSize() {
 		if ( resultSize == null ) {
 			//get result size without object initialization
@@ -636,12 +682,12 @@
 				resultSize = 0;
 			}
 			else {
-				Hits hits;
+				TopDocs hits;
 				try {
-					hits = getQueryAndHits( searcher ).hits;
-					resultSize = hits.length();
+					hits = getQueryHits( searcher ).topDocs;
+					resultSize = hits.totalHits;
 				}
-				catch (IOException e) {
+				catch ( IOException e ) {
 					throw new HibernateException( "Unable to query Lucene index", e );
 				}
 				finally {
@@ -650,7 +696,7 @@
 						closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
 						//searchFactoryImplementor.getReaderProvider().closeReader( searcher.getIndexReader() );
 					}
-					catch (SearchException e) {
+					catch ( SearchException e ) {
 						log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
 					}
 				}
@@ -723,7 +769,9 @@
 			filterDefinitions = new HashMap<String, FullTextFilterImpl>();
 		}
 		FullTextFilterImpl filterDefinition = filterDefinitions.get( name );
-		if ( filterDefinition != null ) return filterDefinition;
+		if ( filterDefinition != null ) {
+			return filterDefinition;
+		}
 
 		filterDefinition = new FullTextFilterImpl();
 		filterDefinition.setName( name );
@@ -758,14 +806,4 @@
 			throw new UnsupportedOperationException( "noLoader should not be used" );
 		}
 	};
-
-	private static class QueryAndHits {
-		private QueryAndHits(org.apache.lucene.search.Query preparedQuery, Hits hits) {
-			this.preparedQuery = preparedQuery;
-			this.hits = hits;
-		}
-
-		public final org.apache.lucene.search.Query preparedQuery;
-		public final Hits hits;
-	}
 }
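
To make the reworked max()/first() pagination arithmetic concrete,
with hypothetical values firstResult = 10, maxResults = 20 and
totalHits = 25:

    int first = 10, maxResults = 20, totalHits = 25;
    int max = maxResults + first < totalHits
            ? first + maxResults - 1 // enough hits: last index would be 29
            : totalHits - 1;         // here: clamped to 24, the last hit
    int size = max - first + 1;      // 15 entities to load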

Added: search/trunk/src/java/org/hibernate/search/query/QueryHits.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/QueryHits.java	                        (rev 0)
+++ search/trunk/src/java/org/hibernate/search/query/QueryHits.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -0,0 +1,95 @@
+// $Id:$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.search.query;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+
+import org.hibernate.search.SearchException;
+
+/**
+ * A helper class which gives access to the current query and its hits. This class will dynamically
+ * reload the underlying <code>TopDocs</code> if required.
+ *
+ * @author Hardy Ferentschik
+ */
+public class QueryHits {
+
+	private static final int DEFAULT_TOP_DOC_RETRIEVAL_SIZE = 100;
+	public final org.apache.lucene.search.Query preparedQuery;
+	public final Searcher searcher;
+	public final Filter filter;
+	public final Sort sort;
+	public final int totalHits;
+	public TopDocs topDocs;
+	
+	public QueryHits(Searcher searcher, org.apache.lucene.search.Query preparedQuery, Filter filter, Sort sort)
+			throws IOException {
+		this.preparedQuery = preparedQuery;
+		this.searcher = searcher;
+		this.filter = filter;
+		this.sort = sort;
+		updateTopDocs( DEFAULT_TOP_DOC_RETRIEVAL_SIZE );
+		totalHits = topDocs.totalHits;
+	}
+
+	public Document doc(int index) throws IOException {
+		return searcher.doc( docId( index ) );
+	}
+
+	public ScoreDoc scoreDoc(int index) throws IOException {
+		if ( index >= totalHits ) {
+			throw new SearchException( "Not a valid ScoreDoc index: " + index );
+		}
+
+		// TODO - Is there a better way to get more TopDocs? Get more or less?
+		if ( index >= topDocs.scoreDocs.length ) {
+			updateTopDocs( 2 * index );
+		}
+
+		return topDocs.scoreDocs[index];
+	}
+
+	public int docId(int index) throws IOException {
+		return scoreDoc( index ).doc;
+	}
+
+	public float score(int index) throws IOException {
+		return scoreDoc( index ).score;
+	}
+
+	public Explanation explain(int index) throws IOException {
+		return searcher.explain( preparedQuery, docId( index ) );
+	}
+
+	private void updateTopDocs(int n) throws IOException {
+		if ( sort == null ) {
+			topDocs = searcher.search( preparedQuery, filter, n );
+		}
+		else {
+			topDocs = searcher.search( preparedQuery, filter, n, sort );
+		}
+	}
+}


Property changes on: search/trunk/src/java/org/hibernate/search/query/QueryHits.java
___________________________________________________________________
Name: svn:keywords
   + Id
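
A minimal usage sketch of the new class (assuming an open Searcher and
a prepared query; filter and sort may be null):

    QueryHits queryHits = new QueryHits( searcher, preparedQuery, null, null );
    for ( int i = 0; i < queryHits.totalHits; i++ ) {
        // re-runs the search with a larger n when i exceeds the fetched window
        Document doc = queryHits.doc( i );
        float score = queryHits.score( i );
    }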

Modified: search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -15,7 +15,6 @@
 import java.util.Map;
 import java.util.TimeZone;
 
-import org.apache.lucene.search.Hits;
 import org.apache.lucene.search.IndexSearcher;
 import org.slf4j.Logger;
 
@@ -52,9 +51,8 @@
  */
 public class ScrollableResultsImpl implements ScrollableResults {
 	private static final Logger log = LoggerFactory.make();
-	private final IndexSearcher searcher;
 	private final SearchFactory searchFactory;
-	private final Hits hits;
+	private final IndexSearcher searcher;
 	private final int first;
 	private final int max;
 	private final int fetchSize;
@@ -64,13 +62,11 @@
 	private final DocumentExtractor documentExtractor;
 	private final Map<EntityInfo, Object[]> resultContext;
 
-	public ScrollableResultsImpl(
-			IndexSearcher searcher, Hits hits, int first, int max, int fetchSize, DocumentExtractor extractor,
+	public ScrollableResultsImpl( IndexSearcher searcher, int first, int max, int fetchSize, DocumentExtractor extractor,
 			Loader loader, SearchFactory searchFactory
 	) {
-		this.searcher = searcher;
 		this.searchFactory = searchFactory;
-		this.hits = hits;
+		this.searcher = searcher;
 		this.first = first;
 		this.max = max;
 		this.current = first;
@@ -106,12 +102,12 @@
 			try {
 				if ( entityInfos[x - first] == null ) {
 					//FIXME should check that clazz match classes but this complicates a lot the firstResult/maxResult
-					entityInfos[x - first] = documentExtractor.extract( hits, x );
+					entityInfos[x - first] = documentExtractor.extract( x );
 					entityInfosLoaded.add( entityInfos[x - first] );
 				}
 			}
 			catch (IOException e) {
-				throw new HibernateException( "Unable to read Lucene hits[" + x + "]", e );
+				throw new HibernateException( "Unable to read Lucene topDocs[" + x + "]", e );
 			}
 
 		}
@@ -165,7 +161,7 @@
 	 * amount positive or negative, we perform the same tests that
 	 * we performed in next() and previous().
 	 *
-	 * @param i
+	 * @param i the scroll distance.
 	 * @return boolean
 	 * @throws HibernateException
 	 */
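
Scrolling callers are unaffected by the constructor change; usage still
looks like (sketch):

    ScrollableResults scroll = fullTextQuery.scroll();
    while ( scroll.next() ) {
        Object entity = scroll.get()[0];
    }
    scroll.close();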

Modified: search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -10,10 +10,11 @@
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Hits;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+
 import org.hibernate.Session;
 import org.hibernate.search.Environment;
 import org.hibernate.search.store.FSDirectoryProvider;
@@ -132,10 +133,11 @@
 		try {
 			QueryParser qp = new QueryParser( "id", new StandardAnalyzer() );
 			Query query = qp.parse( "title:Action OR Abstract:Action" );
-			Hits hits = searcher.search( query );
-			assertEquals( 2, hits.length() );
-			assertTrue( hits.score( 0 ) == 2 * hits.score( 1 ) );
-			assertEquals( "Hibernate in Action", hits.doc( 0 ).get( "title" ) );
+			TopDocs hits = searcher.search( query, 1000 );
+			assertEquals( 2, hits.totalHits );
+			assertTrue( hits.scoreDocs[0].score == 2 * hits.scoreDocs[1].score );
+			org.apache.lucene.document.Document doc = searcher.doc( 0 );
+			assertEquals( "Hibernate in Action", doc.get( "title" ) );
 		}
 		finally {
 			searcher.close();
@@ -164,9 +166,10 @@
 		// ( fails when deleting -concurrently- to IndexSearcher initialization! )
 		FileHelper.delete(getBaseIndexDir());
 		TermQuery query = new TermQuery( new Term("title","action") );
-		Hits hits = searcher.search( query );
-		assertEquals( 1, hits.length() );
-		assertEquals( "Hibernate Search in Action", hits.doc( 0 ).get( "title" ) );
+		TopDocs hits = searcher.search( query, 1000 );
+		assertEquals( 1, hits.totalHits );
+		org.apache.lucene.document.Document doc = searcher.doc( 0 );
+		assertEquals( "Hibernate Search in Action", doc.get( "title" ) );
 		searcher.close();
 	}
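
One caveat on the migrated assertions: searcher.doc( 0 ) looks up the
raw Lucene document with id 0, which matches the top hit only by
coincidence in these small test indexes. The id-safe form resolves the
id through the hit list first:

    Document doc = searcher.doc( hits.scoreDocs[0].doc );
    assertEquals( "Hibernate in Action", doc.get( "title" ) );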
 

Modified: search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -2,9 +2,9 @@
 
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Hits;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
 
 import org.hibernate.Session;
 import org.hibernate.Transaction;
@@ -61,12 +61,12 @@
 		DirectoryProvider provider = fullTextSession.getSearchFactory()
 				.getDirectoryProviders( ProvidedIdPerson.class )[0];
 		IndexSearcher searcher = new IndexSearcher( provider.getDirectory() );
-		Hits hits = searcher.search( luceneQuery );
+		TopDocs hits = searcher.search( luceneQuery, 1000 );
 		searcher.close();
 		transaction.commit();
 		session.close();
 
-		assertEquals( 3, hits.length() );
+		assertEquals( 3, hits.totalHits );
 	}
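
Asserting on hits.totalHits after searcher.close() is safe precisely
because TopDocs is a detached snapshot; only document resolution still
needs the open searcher:

    TopDocs hits = searcher.search( luceneQuery, 1000 );
    Document doc = searcher.doc( hits.scoreDocs[0].doc ); // must happen before close()
    searcher.close();
    assertEquals( 3, hits.totalHits );                    // fine after close()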
 
 

Modified: search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -10,15 +10,18 @@
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Hits;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.document.Document;
 import org.slf4j.Logger;
 
 import org.hibernate.SessionFactory;
 import org.hibernate.search.FullTextQuery;
 import org.hibernate.search.FullTextSession;
 import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
 import org.hibernate.search.util.LoggerFactory;
 
 /**
@@ -70,12 +73,13 @@
 			Query q = getQuery();
 			long start = System.currentTimeMillis();
 			// Search
-			Hits hits = indexsearcher.search( q );
+			TopDocs hits = indexsearcher.search( q, 1000 );
 			List<String> names = new ArrayList<String>(100);
 			for (int i = 0 ; i < 100 ; i++) {
-				names.add( hits.doc( i ).get( "name" ) );
+				Document doc = getDocument( indexsearcher, hits.scoreDocs[i].doc );
+				names.add( doc.get( "name" ) );
 			}
-			int resultSize = hits.length();
+			int resultSize = hits.totalHits;
 			long totalTime = System.currentTimeMillis() - start;
 //			log.error( "Lucene [ Thread-id : " + threadId + " ] Total time taken for search is : " + totalTime + "ms with total no. of matching records : " + hits.length() );
 			setTime( totalTime );
@@ -92,6 +96,14 @@
 		}
 	}
 
+	private Document getDocument(Searcher searcher, int docId ) {
+		try {
+			return searcher.doc( docId );
+		} catch (IOException ioe) {
+			throw new SearchException( "Unable to retrieve document", ioe );
+		}
+	}
+
 	private Query getQuery() throws ParseException {
 		QueryParser qp = new QueryParser( "t", new StandardAnalyzer() );
 		qp.setLowercaseExpandedTerms( true );
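
The fixed-bound loop above assumes at least 100 hits; on a smaller
index hits.scoreDocs would hold fewer entries and the loop would throw
an ArrayIndexOutOfBoundsException. A defensive variant (sketch):

    int n = Math.min( 100, hits.scoreDocs.length );
    for ( int i = 0; i < n; i++ ) {
        names.add( getDocument( indexsearcher, hits.scoreDocs[i].doc ).get( "name" ) );
    }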

Modified: search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -230,7 +230,7 @@
 		assertEquals( "dept incorrect", "Accounting", projection[2] );
 		assertEquals( "THIS incorrect", "Jackson", ( (Employee) projection[3] ).getLastname() );
 		assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (Serializable) projection[0] ) );
-		assertEquals( "SCORE incorrect", 1.0F, projection[4] );
+		assertEquals( "SCORE incorrect", 1.9162908F, projection[4] );
 		assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
 		assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[5] ).getFields().size() );
 		assertEquals( "ID incorrect", 1001, projection[6] );
@@ -249,7 +249,7 @@
 		assertTrue( "DOCUMENT incorrect", projection[0] instanceof Document );
 		assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[0] ).getFields().size() );
 		assertEquals( "THIS incorrect", projection[1], s.get( Employee.class, (Serializable) projection[4] ) );
-		assertEquals( "SCORE incorrect", 1.0F, projection[2] );
+		assertEquals( "SCORE incorrect", 1.9162908F, projection[2] );
 		assertNull( "BOOST not removed", projection[3] );
 		assertEquals( "ID incorrect", 1001, projection[4] );
 		assertEquals( "id incorrect", 1001, projection[5] );

Modified: search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java	2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java	2008-11-10 20:14:05 UTC (rev 15541)
@@ -13,6 +13,8 @@
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.Query;
+import org.slf4j.Logger;
+
 import org.hibernate.Session;
 import org.hibernate.SessionFactory;
 import org.hibernate.Transaction;
@@ -22,11 +24,15 @@
 import org.hibernate.search.store.FSDirectoryProvider;
 import org.hibernate.search.test.SearchTestCase;
 import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
 
 /**
  * @author Emmanuel Bernard
  */
 public abstract class ReaderPerfTestCase extends SearchTestCase {
+
+	private static final Logger log = LoggerFactory.make();
+
 	protected void setUp() throws Exception {
 		File sub = getBaseIndexDir();
 		sub.mkdir();
@@ -50,7 +56,9 @@
 
 	protected void tearDown() throws Exception {
 		super.tearDown();
-		if ( getSessions() != null ) getSessions().close();
+		if ( getSessions() != null ) {
+			getSessions().close();
+		}
 		File sub = getBaseIndexDir();
 		FileHelper.delete( sub );
 	}
@@ -58,22 +66,24 @@
 	public boolean insert = true;
 
 	public void testConcurrency() throws Exception {
-		Session s = openSession( );
+		Session s = openSession();
 		Transaction tx = s.beginTransaction();
-		for ( int index = 0 ; index < 5000 ; index++ ) {
+		for ( int index = 0; index < 5000; index++ ) {
 			Detective detective = new Detective();
 			detective.setName( "John Doe " + index );
 			detective.setBadge( "123455" + index );
-			detective.setPhysicalDescription( "Blond green eye etc etc");
+			detective.setPhysicalDescription( "Blond green eye etc etc" );
 			s.persist( detective );
 			Suspect suspect = new Suspect();
 			suspect.setName( "Jane Doe " + index );
-			suspect.setPhysicalDescription( "brunette, short, 30-ish");
+			suspect.setPhysicalDescription( "brunette, short, 30-ish" );
 			if ( index % 20 == 0 ) {
 				suspect.setSuspectCharge( "thief liar " );
 			}
 			else {
-				suspect.setSuspectCharge( " It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery.  Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud.");
+				suspect.setSuspectCharge(
+						" It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery.  Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud."
+				);
 			}
 			s.persist( suspect );
 		}
@@ -88,6 +98,7 @@
 		ReverseWork reverseWork = new ReverseWork( getSessions() );
 		long start = System.currentTimeMillis();
 		int iteration = 100;
+		log.info( "Starting worker threads." );
 		for ( int i = 0; i < iteration; i++ ) {
 			es.execute( work );
 			es.execute( reverseWork );
@@ -95,12 +106,11 @@
 		while ( work.count < iteration - 1 ) {
 			Thread.sleep( 20 );
 		}
-		System.out.println( iteration + " iterations in " + nThreads + " threads: " + ( System
-				.currentTimeMillis() - start ) );
+		log.debug( iteration + " iterations in " + nThreads + " threads: " + ( System.currentTimeMillis() - start ) );
 	}
 
 	protected class Work implements Runnable {
-		private Random random = new Random( );
+		private Random random = new Random();
 		private SessionFactory sf;
 		public volatile int count = 0;
 
@@ -112,12 +122,13 @@
 			Session s = sf.openSession();
 			Transaction tx = s.beginTransaction();
 			QueryParser parser = new MultiFieldQueryParser(
-					new String[] {"name", "physicalDescription", "suspectCharge"},
-					new StandardAnalyzer() );
+					new String[] { "name", "physicalDescription", "suspectCharge" },
+					new StandardAnalyzer()
+			);
 			FullTextQuery query = getQuery( "John Doe", parser, s );
 			assertTrue( query.getResultSize() != 0 );
 
-			query = getQuery( "green",  parser, s );
+			query = getQuery( "green", parser, s );
 			random.nextInt( query.getResultSize() - 15 );
 			query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
 			query.setMaxResults( 10 );
@@ -131,18 +142,18 @@
 			query = getQuery( "John Doe", parser, s );
 			assertTrue( query.getResultSize() != 0 );
 
-			query = getQuery( "thief",  parser, s );
+			query = getQuery( "thief", parser, s );
 			int firstResult = random.nextInt( query.getResultSize() - 15 );
 			query.setFirstResult( firstResult );
 			query.setMaxResults( 10 );
 			List result = query.list();
-			Object object = result.get(0);
-			if (insert && object instanceof Detective) {
-				Detective detective = (Detective) object;
+			Object object = result.get( 0 );
+			if ( insert && object instanceof Detective ) {
+				Detective detective = ( Detective ) object;
 				detective.setPhysicalDescription( detective.getPhysicalDescription() + " Eye" + firstResult );
 			}
-			else if (insert && object instanceof Suspect) {
-				Suspect suspect = (Suspect) object;
+			else if ( insert && object instanceof Suspect ) {
+				Suspect suspect = ( Suspect ) object;
 				suspect.setPhysicalDescription( suspect.getPhysicalDescription() + " Eye" + firstResult );
 			}
 			tx.commit();
@@ -153,9 +164,9 @@
 		private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
 			Query luceneQuery = null;
 			try {
-				luceneQuery = parser.parse(queryString);
+				luceneQuery = parser.parse( queryString );
 			}
-			catch (ParseException e) {
+			catch ( ParseException e ) {
 				e.printStackTrace();
 			}
 			return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );
@@ -174,12 +185,13 @@
 			Session s = sf.openSession();
 			Transaction tx = s.beginTransaction();
 			QueryParser parser = new MultiFieldQueryParser(
-					new String[] {"name", "physicalDescription", "suspectCharge"},
-					new StandardAnalyzer() );
+					new String[] { "name", "physicalDescription", "suspectCharge" },
+					new StandardAnalyzer()
+			);
 			FullTextQuery query = getQuery( "John Doe", parser, s );
 			assertTrue( query.getResultSize() != 0 );
 
-			query = getQuery( "london",  parser, s );
+			query = getQuery( "london", parser, s );
 			random.nextInt( query.getResultSize() - 15 );
 			query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
 			query.setMaxResults( 10 );
@@ -193,7 +205,7 @@
 			getQuery( "John Doe", parser, s );
 			assertTrue( query.getResultSize() != 0 );
 
-			query = getQuery( "green",  parser, s );
+			query = getQuery( "green", parser, s );
 			random.nextInt( query.getResultSize() - 15 );
 			query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
 			query.setMaxResults( 10 );
@@ -205,9 +217,9 @@
 		private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
 			Query luceneQuery = null;
 			try {
-				luceneQuery = parser.parse(queryString);
+				luceneQuery = parser.parse( queryString );
 			}
-			catch (ParseException e) {
+			catch ( ParseException e ) {
 				e.printStackTrace();
 			}
 			return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );



