Author: epbernard
Date: 2007-07-29 12:30:48 -0400 (Sun, 29 Jul 2007)
New Revision: 12849
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
Log:
HSEARCH-90 avoid lists with negative capacity on setFirstResult
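
The underlying issue: when setFirstResult points past the last hit, max - first + 1 goes negative, and constructing an ArrayList or HashMap with a negative capacity throws IllegalArgumentException. The fix clamps the computed window size to zero and rejects negative pagination input up front. A minimal standalone sketch of that pattern (hypothetical names, not the actual Hibernate Search classes):

    import java.util.ArrayList;
    import java.util.List;

    public class PaginationWindowExample {

        private int firstResult = 0;

        // Mirrors the guard added to setFirstResult in this commit.
        public PaginationWindowExample setFirstResult(int firstResult) {
            if (firstResult < 0) {
                throw new IllegalArgumentException( "'first' pagination parameter less than 0" );
            }
            this.firstResult = firstResult;
            return this;
        }

        // Clamp the window size so the list capacity can never be negative,
        // even when first lies beyond the last hit (max < first).
        public List<String> windowFor(int first, int max) {
            int size = max - first + 1 < 0 ? 0 : max - first + 1;
            return new ArrayList<String>( size );
        }
    }
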
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-07-29 15:59:13 UTC (rev 12848)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java	2007-07-29 16:30:48 UTC (rev 12849)
@@ -123,14 +123,15 @@
int max = max( first, hits );
Session sess = (Session) this.session;
- List<EntityInfo> entityInfos = new ArrayList<EntityInfo>( max - first + 1 );
+ int size = max - first + 1 < 0 ? 0 : max - first + 1;
+ List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
DocumentExtractor extractor = new DocumentExtractor( searchFactoryImplementor, indexProjection );
for (int index = first; index <= max; index++) {
//TODO use indexSearcher.getIndexReader().document( hits.id(index), FieldSelector(indexProjection) );
- entityInfos.add( extractor.extract( hits, index ) );
+ infos.add( extractor.extract( hits, index ) );
}
Loader loader = getLoader( sess, searchFactoryImplementor );
- return new IteratorImpl( entityInfos, loader );
+ return new IteratorImpl( infos, loader );
}
catch (IOException e) {
throw new HibernateException( "Unable to query Lucene index", e );
@@ -231,7 +232,9 @@
int first = first();
int max = max( first, hits );
Session sess = (Session) this.session;
- List<EntityInfo> infos = new ArrayList<EntityInfo>( max - first + 1 );
+
+ int size = max - first + 1 < 0 ? 0 : max - first + 1;
+ List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
DocumentExtractor extractor = new DocumentExtractor( searchFactoryImplementor, indexProjection );
for (int index = first; index <= max; index++) {
infos.add( extractor.extract( hits, index ) );
@@ -506,11 +509,17 @@
}
public FullTextQuery setFirstResult(int firstResult) {
+ if (firstResult < 0) {
+ throw new IllegalArgumentException("'first' pagination parameter less than 0");
+ }
this.firstResult = firstResult;
return this;
}
public FullTextQuery setMaxResults(int maxResults) {
+ if (maxResults < 0) {
+ throw new IllegalArgumentException("'max' pagination parameter less than 0");
+ }
this.maxResults = maxResults;
return this;
}
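
For callers, these checks mean a negative first/max argument now fails fast with a clear message instead of surfacing later as a negative collection capacity, while an in-range window that merely starts past the last hit still returns empty results. A hedged sketch of the fail-fast behaviour; checkedFirstResult is a hypothetical stand-in, not Hibernate Search API:

    public class PaginationCheckExample {

        // Hypothetical stand-in for the validation added to setFirstResult above.
        static int checkedFirstResult(int firstResult) {
            if (firstResult < 0) {
                throw new IllegalArgumentException( "'first' pagination parameter less than 0" );
            }
            return firstResult;
        }

        public static void main(String[] args) {
            try {
                checkedFirstResult( -1 );
            }
            catch (IllegalArgumentException e) {
                System.out.println( "rejected: " + e.getMessage() );
            }
            System.out.println( "accepted: " + checkedFirstResult( 10 ) );
        }
    }
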
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2007-07-29 15:59:13 UTC (rev 12848)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/ScrollableResultsImpl.java	2007-07-29 16:30:48 UTC (rev 12849)
@@ -59,8 +59,9 @@
this.current = first;
this.loader = loader;
this.documentExtractor = extractor;
- this.entityInfos = new EntityInfo[max - first + 1];
- this.resultContext = new HashMap<EntityInfo, Object[]>( max - first + 1 );
+ int size = max - first + 1 > 0 ? max - first + 1 : 0;
+ this.entityInfos = new EntityInfo[size];
+ this.resultContext = new HashMap<EntityInfo, Object[]>( size );
}
public boolean next() throws HibernateException {
@@ -121,14 +122,13 @@
if ( current < first || current > max ) return null;
EntityInfo info = entityInfos[current - first];
if ( info == null ) {
- Document document = null;
try {
info = documentExtractor.extract( hits, current );
}
catch (IOException e) {
throw new HibernateException( "Unable to read Lucene hits[" + current + "]", e );
}
- //FIXME should check that clazz match classes but this complexify a lot the firstResult/maxResult
+ //FIXME should check that clazz match classes but this complicates a lot the firstResult/maxResult
entityInfos[current - first] = info;
}
if ( !resultContext.containsKey( info ) ) {
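
With the size clamped, an empty window yields a zero-length entityInfos array, and the range check above (current < first || current > max) returns null before any array access, which is what the new testScrollEmptyHits below verifies. A minimal standalone illustration of that guard (hypothetical names, not the Hibernate Search types):

    public class EmptyWindowGuardExample {

        // Same shape as the guard above: out-of-range positions return null.
        static String resultAt(int current, int first, int max, String[] infos) {
            if ( current < first || current > max ) return null; // empty window: max < first, always null
            return infos[current - first];
        }

        public static void main(String[] args) {
            int first = 10, max = 9;                              // firstResult past the last hit
            int size = max - first + 1 > 0 ? max - first + 1 : 0; // clamped to 0
            String[] infos = new String[size];                    // no negative array size
            System.out.println( resultAt( 10, first, max, infos ) ); // prints null
        }
    }
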
Modified:
trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2007-07-29 15:59:13 UTC (rev 12848)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQueryTest.java	2007-07-29 16:30:48 UTC (rev 12849)
@@ -1,21 +1,22 @@
//$Id$
package org.hibernate.search.test.query;
+import java.util.Iterator;
import java.util.List;
-import java.util.Iterator;
-import org.hibernate.search.test.SearchTestCase;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.FetchMode;
+import org.hibernate.Hibernate;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
-import org.hibernate.search.FullTextQuery;
-import org.hibernate.Transaction;
-import org.hibernate.Hibernate;
-import org.hibernate.ScrollableResults;
-import org.hibernate.FetchMode;
+import org.hibernate.search.test.SearchTestCase;
import org.hibernate.stat.Statistics;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.analysis.StopAnalyzer;
/**
@@ -98,7 +99,7 @@
Statistics stats = s.getSessionFactory().getStatistics();
stats.clear();
boolean enabled = stats.isStatisticsEnabled();
- if (!enabled) stats.setStatisticsEnabled( true );
+ if ( !enabled ) stats.setStatisticsEnabled( true );
FullTextQuery hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
assertEquals( "Exection of getResultSize without actual results", 2,
hibQuery.getResultSize() );
assertEquals( "No entity should be loaded", 0, stats.getEntityLoadCount() );
@@ -108,7 +109,7 @@
List result = hibQuery.list();
assertNotNull( result );
assertEquals( "2 entities should be loaded", 2, stats.getEntityLoadCount()
);
- if (!enabled) stats.setStatisticsEnabled( false );
+ if ( !enabled ) stats.setStatisticsEnabled( false );
for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
tx.commit();
s.close();
@@ -311,9 +312,9 @@
Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
s.save( book );
Author emmanuel = new Author();
- emmanuel.setName("Emmanuel");
+ emmanuel.setName( "Emmanuel" );
s.save( emmanuel );
- book.getAuthors().add(emmanuel);
+ book.getAuthors().add( emmanuel );
tx.commit();
s.clear();
tx = s.beginTransaction();
@@ -324,16 +325,16 @@
List result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with no explicit criteria", 1, result.size() );
- book = (Book) result.get(0);
+ book = (Book) result.get( 0 );
assertFalse( "Association should not be inintialized", Hibernate.isInitialized( book.getAuthors() ) );
result = s.createFullTextQuery( query ).setCriteriaQuery( s.createCriteria( Book.class ).setFetchMode( "authors", FetchMode.JOIN ) ).list();
assertNotNull( result );
assertEquals( "Query with explicit criteria", 1, result.size() );
- book = (Book) result.get(0);
+ book = (Book) result.get( 0 );
assertTrue( "Association should be inintialized", Hibernate.isInitialized( book.getAuthors() ) );
- assertEquals( 1, book.getAuthors().size() );
+ assertEquals( 1, book.getAuthors().size() );
//cleanup
Author author = book.getAuthors().iterator().next();
@@ -344,13 +345,107 @@
s.close();
}
+ public void testScrollEmptyHits() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+
+ ScrollableResults projections = hibQuery.scroll();
+ projections.beforeFirst();
+ projections.next();
+ Object[] projection = projections.get();
+ assertNull( projection );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+
+ projections = hibQuery.scroll();
+ projections.beforeFirst();
+ projections.next();
+ projection = projections.get();
+ assertNull( projection );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testListEmptyHits() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ List result = hibQuery.list();
+ assertEquals( 0, result.size() );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+ result = hibQuery.list();
+ assertEquals( 0, result.size() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testIterateEmptyHits() throws Exception {
+ FullTextSession s = Search.createFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ Iterator iter = hibQuery.iterate();
+ assertFalse( iter.hasNext() );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+ iter = hibQuery.iterate();
+ assertFalse( iter.hasNext() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ private void prepEmployeeIndex(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ Employee e1 = new Employee( 1000, "Griffin", "ITech" );
+ s.save( e1 );
+ Employee e2 = new Employee( 1001, "Jackson", "Accounting" );
+ s.save( e2 );
+ Employee e3 = new Employee( 1002, "Jimenez", "ITech" );
+ s.save( e3 );
+ Employee e4 = new Employee( 1003, "Stejskal", "ITech" );
+ s.save( e4 );
+ Employee e5 = new Employee( 1004, "Whetbrook", "ITech" );
+ s.save( e5 );
+
+ tx.commit();
+ }
+
+
protected Class[] getMappings() {
return new Class[] {
Book.class,
AlternateBook.class,
Clock.class,
- Author.class
+ Author.class,
+ Employee.class
};
}
}