Author: hardy.ferentschik
Date: 2008-11-13 11:02:44 -0500 (Thu, 13 Nov 2008)
New Revision: 15563
Modified:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
search/trunk/src/java/org/hibernate/search/query/QueryHits.java
search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
Log:
HSEARCH-213
Applied patch and extended it to catch the TwoWayFieldBridge case. In order to do that I
added a flag in DocumentBuilder. Unfortunately, I had to 'drag' the flag along via
FullTextQueryImpl into DocumentExtractor.
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-13
15:40:37 UTC (rev 15562)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-13
16:02:44 UTC (rev 15563)
@@ -2,9 +2,9 @@
package org.hibernate.search.engine;
import java.io.Serializable;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
-import java.lang.reflect.Method;
-import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -53,6 +53,7 @@
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.TwoWayFieldBridge;
import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
+import org.hibernate.search.bridge.TwoWayStringBridge;
import org.hibernate.search.impl.InitContext;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.IndexShardingStrategy;
@@ -61,7 +62,7 @@
import org.hibernate.search.util.ScopedAnalyzer;
/**
- * Set up and provide a manager for indexes classes
+ * Set up and provide a manager for indexed classes.
*
* @author Gavin King
* @author Emmanuel Bernard
@@ -82,6 +83,14 @@
* Flag indicating whether <code>@DocumentId</code> was explicitly
specified.
*/
private boolean explicitDocumentId = false;
+
+ /**
+ * Flag indicating whether {@link org.apache.lucene.search.Searcher#doc(int,
org.apache.lucene.document.FieldSelector)}
+ * can be used in order to retrieve documents. This is only safe to do if we know that
+ * all involved bridges are implementing <code>TwoWayStringBridge</code>.
See HSEARCH-213.
+ */
+ private boolean allowFieldSelectionInProjection = false;
+
private XMember idGetter;
private Float idBoost;
public static final String CLASS_FIELDNAME = "_hibernate_class";
@@ -114,6 +123,25 @@
init( clazz, context, reflectionManager );
}
+ /**
+ * Constructor used on a non @Indexed entity.
+ */
+ public DocumentBuilder(XClass clazz, InitContext context, ReflectionManager
reflectionManager) {
+ this.entityState = EntityState.CONTAINED_IN_ONLY;
+ this.beanClass = clazz;
+ this.directoryProviders = null;
+ this.shardingStrategy = null;
+
+
+ this.reflectionManager = reflectionManager;
+ this.similarity = context.getDefaultSimilarity();
+
+ init( clazz, context, reflectionManager );
+ if ( rootPropertiesMetadata.containedInGetters.size() == 0 ) {
+ this.entityState = EntityState.NON_INDEXABLE;
+ }
+ }
+
private void init(XClass clazz, InitContext context, ReflectionManager
reflectionManager) {
if ( clazz == null ) throw new AssertionFailure( "Unable to build a
DocumentBuilder with a null class" );
rootPropertiesMetadata.boost = getBoost( clazz );
@@ -131,34 +159,42 @@
idBridge = BridgeFactory.extractTwoWayType( provided.bridge() );
idKeywordName = provided.name();
}
+
//if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails
on most database
//a TwoWayString2FieldBridgeAdaptor is never a composite id
safeFromTupleId = entityState != EntityState.INDEXED ||
TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom( idBridge.getClass() );
+ checkAllowFieldSelection();
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Field selection in projections is set to {} for entity {}.",
allowFieldSelectionInProjection, clazz.getName() );
+ }
}
/**
- * Constructor used on a non @Indexed entity.
+ * Checks whether all involved bridges are two way string bridges. If so we can optimize
document retrieval
+ * by using <code>FieldSelector</code>. See HSEARCH-213.
*/
- public DocumentBuilder(XClass clazz, InitContext context, ReflectionManager
reflectionManager) {
- this.entityState = EntityState.CONTAINED_IN_ONLY;
- this.beanClass = clazz;
- this.directoryProviders = null;
- this.shardingStrategy = null;
-
-
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
-
- init( clazz, context, reflectionManager );
- if ( rootPropertiesMetadata.containedInGetters.size() == 0 ) {
- this.entityState = EntityState.NON_INDEXABLE;
+ private void checkAllowFieldSelection() {
+ allowFieldSelectionInProjection = true;
+ if ( ! (idBridge instanceof TwoWayStringBridge || idBridge instanceof
TwoWayString2FieldBridgeAdaptor) ) {
+ allowFieldSelectionInProjection = false;
+ return;
}
+ for ( FieldBridge bridge : rootPropertiesMetadata.fieldBridges) {
+ if ( !( bridge instanceof TwoWayStringBridge || bridge instanceof
TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ }
}
public boolean isRoot() {
return isRoot;
}
+ public boolean allowFieldSelectionInProjection() {
+ return allowFieldSelectionInProjection;
+ }
+
private ProvidedId findProvidedId(XClass clazz, ReflectionManager reflectionManager) {
ProvidedId id = null;
XClass currentClass = clazz;
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-13
15:40:37 UTC (rev 15562)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-13
16:02:44 UTC (rev 15563)
@@ -3,13 +3,21 @@
import java.io.IOException;
import java.io.Serializable;
+import java.util.Set;
+import java.util.Map;
+import java.util.HashMap;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelectorResult;
+import org.apache.lucene.document.MapFieldSelector;
+import org.apache.lucene.document.FieldSelector;
import org.hibernate.search.ProjectionConstants;
import org.hibernate.search.query.QueryHits;
/**
+ * Helper class to extract <code>EntityInfo</code>s out of the
<code>QueryHits</code>.
+ *
* @author Emmanuel Bernard
* @author John Griffin
* @author Hardy Ferentschik
@@ -18,13 +26,44 @@
private final SearchFactoryImplementor searchFactoryImplementor;
private final String[] projection;
private final QueryHits queryHits;
+ private FieldSelector fieldSelector;
+ private boolean allowFieldSelection;
- public DocumentExtractor(QueryHits queryHits, SearchFactoryImplementor
searchFactoryImplementor, String... projection) {
+ public DocumentExtractor(QueryHits queryHits, SearchFactoryImplementor
searchFactoryImplementor, String[] projection, Set<String> idFieldNames, boolean
allowFieldSelection) {
this.searchFactoryImplementor = searchFactoryImplementor;
this.projection = projection;
this.queryHits = queryHits;
+ this.allowFieldSelection = allowFieldSelection;
+ initFieldSelection( projection, idFieldNames );
}
+ private void initFieldSelection(String[] projection, Set<String> idFieldNames) {
+ // if we need to project DOCUMENT do not use fieldSelector as the user might want
anything
+ int projectionSize = projection != null && projection.length != 0 ?
projection.length : 0;
+ if ( projectionSize != 0 ) {
+ for ( String property : projection ) {
+ if ( ProjectionConstants.DOCUMENT.equals( property ) ) {
+ allowFieldSelection = false;
+ return;
+ }
+ }
+ }
+
+
+ // set up the field selector. CLASS_FIELDNAME and id fields are needed on top of any
projected fields
+ Map<String, FieldSelectorResult> fields = new HashMap<String,
FieldSelectorResult>( 1 + idFieldNames.size() + projectionSize );
+ fields.put( DocumentBuilder.CLASS_FIELDNAME, FieldSelectorResult.LOAD );
+ for ( String idFieldName : idFieldNames ) {
+ fields.put( idFieldName, FieldSelectorResult.LOAD );
+ }
+ if ( projectionSize != 0 ) {
+ for ( String projectedField : projection ) {
+ fields.put( projectedField, FieldSelectorResult.LOAD );
+ }
+ }
+ this.fieldSelector = new MapFieldSelector( fields );
+ }
+
private EntityInfo extract(Document document) {
Class clazz = DocumentBuilder.getDocumentClass( document );
Serializable id = DocumentBuilder.getDocumentId( searchFactoryImplementor, clazz,
document );
@@ -36,7 +75,13 @@
}
public EntityInfo extract(int index) throws IOException {
- Document doc = queryHits.doc( index );
+ Document doc;
+ if ( allowFieldSelection ) {
+ doc = queryHits.doc( index, fieldSelector );
+ }
+ else {
+ doc = queryHits.doc( index );
+ }
//TODO if we are only looking for score (unlikely), avoid accessing doc (lazy load)
EntityInfo entityInfo = extract( doc );
Object[] eip = entityInfo.projection;
Modified: search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-13
15:40:37 UTC (rev 15562)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-13
16:02:44 UTC (rev 15563)
@@ -63,7 +63,7 @@
import org.hibernate.transform.ResultTransformer;
/**
- * Implementation of {@link org.hibernate.search.FullTextQuery}
+ * Implementation of {@link org.hibernate.search.FullTextQuery}.
*
* @author Emmanuel Bernard
* @author Hardy Ferentschik
@@ -83,13 +83,20 @@
private Filter filter;
private Criteria criteria;
private String[] indexProjection;
+ private Set<String> idFieldNames;
+ private boolean allowFieldSelectionInProjection = true;
private ResultTransformer resultTransformer;
private SearchFactoryImplementor searchFactoryImplementor;
private Map<String, FullTextFilterImpl> filterDefinitions;
private int fetchSize = 1;
/**
- * classes must be immutable
+ * Constructs a <code>FullTextQueryImpl</code> instance.
+ *
+ * @param query The Lucene query
+ * @param classes Array of classes (must be immutable) used to filter the results to
the given class types.
+ * @param session Access to the Hibernate session.
+ * @param parameterMetadata Additional query metadata.
*/
public FullTextQueryImpl(org.apache.lucene.search.Query query, Class[] classes,
SessionImplementor session,
ParameterMetadata parameterMetadata) {
@@ -139,7 +146,9 @@
int size = max - first + 1 < 0 ? 0 : max - first + 1;
List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
- DocumentExtractor extractor = new DocumentExtractor( queryHits,
searchFactoryImplementor, indexProjection );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactoryImplementor, indexProjection, idFieldNames,
allowFieldSelectionInProjection
+ );
for ( int index = first; index <= max; index++ ) {
//TODO use indexSearcher.getIndexReader().document( hits.id(index),
FieldSelector(indexProjection) );
infos.add( extractor.extract( index ) );
@@ -208,7 +217,7 @@
public ScrollableResults scroll() throws HibernateException {
//keep the searcher open until the resultset is closed
- SearchFactoryImplementor searchFactory = ContextHelper.getSearchFactoryBySFI( session
);
+ SearchFactoryImplementor searchFactory = getSearchFactoryImplementor();
//find the directories
IndexSearcher searcher = buildSearcher( searchFactory );
@@ -217,7 +226,9 @@
QueryHits queryHits = getQueryHits( searcher );
int first = first();
int max = max( first, queryHits.totalHits );
- DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactory,
indexProjection );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactory, indexProjection,
idFieldNames,allowFieldSelectionInProjection
+ );
Loader loader = getLoader( ( Session ) this.session, searchFactory );
return new ScrollableResultsImpl(
searcher, first, max, fetchSize, extractor, loader, searchFactory
@@ -241,7 +252,7 @@
}
public List list() throws HibernateException {
- SearchFactoryImplementor searchFactoryImplementor =
ContextHelper.getSearchFactoryBySFI( session );
+ SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
//find the directories
IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
if ( searcher == null ) {
@@ -255,7 +266,9 @@
int size = max - first + 1 < 0 ? 0 : max - first + 1;
List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
- DocumentExtractor extractor = new DocumentExtractor( queryHits,
searchFactoryImplementor, indexProjection );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactoryImplementor, indexProjection, idFieldNames,
allowFieldSelectionInProjection
+ );
for ( int index = first; index <= max; index++ ) {
infos.add( extractor.extract( index ) );
}
@@ -383,7 +396,7 @@
}
private Filter createFilter(FilterDef def, Object instance) {
- Filter filter = null;
+ Filter filter;
if ( def.getFactoryMethod() != null ) {
try {
filter = ( Filter ) def.getFactoryMethod().invoke( instance );
@@ -558,12 +571,16 @@
/**
- * can return null
+ * Build the index searcher for this fulltext query.
+ *
+ * @param searchFactoryImplementor the search factory.
+ * @return the <code>IndexSearcher</code> for this query (can be
<code>null</code>.
* TODO change classesAndSubclasses by side effect, which is a mismatch with the
Searcher return, fix that.
*/
private IndexSearcher buildSearcher(SearchFactoryImplementor searchFactoryImplementor)
{
Map<Class<?>, DocumentBuilder<?>> builders =
searchFactoryImplementor.getDocumentBuilders();
List<DirectoryProvider> directories = new ArrayList<DirectoryProvider>();
+ Set<String> idFieldNames = new HashSet<String>();
Similarity searcherSimilarity = null;
//TODO check if caching this work for the last n list of classes makes a perf boost
@@ -572,15 +589,19 @@
// but we have to make sure there is at least one
if ( builders.isEmpty() ) {
throw new HibernateException(
- "There are no mapped entities (don't forget to add @Indexed to at least
one class)."
+ "There are no mapped entities. Don't forget to add @Indexed to at least
one class."
);
}
for ( DocumentBuilder builder : builders.values() ) {
searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
+ if ( builder.getIdKeywordName() != null ) {
+ idFieldNames.add( builder.getIdKeywordName() );
+ allowFieldSelectionInProjection = allowFieldSelectionInProjection &&
builder.allowFieldSelectionInProjection();
+ }
final DirectoryProvider[] directoryProviders =
builder.getDirectoryProviderSelectionStrategy()
.getDirectoryProvidersForAllShards();
- populateDirectories( directories, directoryProviders, searchFactoryImplementor );
+ populateDirectories( directories, directoryProviders );
}
classesAndSubclasses = null;
}
@@ -600,14 +621,18 @@
if ( builder == null ) {
throw new HibernateException( "Not a mapped entity (don't forget to add
@Indexed): " + clazz );
}
-
+ if ( builder.getIdKeywordName() != null ) {
+ idFieldNames.add( builder.getIdKeywordName() );
+ allowFieldSelectionInProjection = allowFieldSelectionInProjection &&
builder.allowFieldSelectionInProjection();
+ }
final DirectoryProvider[] directoryProviders =
builder.getDirectoryProviderSelectionStrategy()
.getDirectoryProvidersForAllShards();
searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
- populateDirectories( directories, directoryProviders, searchFactoryImplementor );
+ populateDirectories( directories, directoryProviders );
}
- classesAndSubclasses = involvedClasses;
+ this.classesAndSubclasses = involvedClasses;
}
+ this.idFieldNames = idFieldNames;
//compute optimization needClassFilterClause
//if at least one DP contains one class that is not part of the targeted
classesAndSubclasses we can't optimize
@@ -643,8 +668,7 @@
return is;
}
- private void populateDirectories(List<DirectoryProvider> directories,
DirectoryProvider[] directoryProviders,
- SearchFactoryImplementor searchFactoryImplementor) {
+ private void populateDirectories(List<DirectoryProvider> directories,
DirectoryProvider[] directoryProviders) {
for ( DirectoryProvider provider : directoryProviders ) {
if ( !directories.contains( provider ) ) {
directories.add( provider );
@@ -677,7 +701,7 @@
public int getResultSize() {
if ( resultSize == null ) {
//get result size without object initialization
- SearchFactoryImplementor searchFactoryImplementor =
ContextHelper.getSearchFactoryBySFI( session );
+ SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
if ( searcher == null ) {
resultSize = 0;
Modified: search/trunk/src/java/org/hibernate/search/query/QueryHits.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/QueryHits.java 2008-11-13 15:40:37
UTC (rev 15562)
+++ search/trunk/src/java/org/hibernate/search/query/QueryHits.java 2008-11-13 16:02:44
UTC (rev 15563)
@@ -1,4 +1,4 @@
-// $Id:$
+// $Id$
/*
* JBoss, Home of Professional Open Source
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
@@ -20,6 +20,7 @@
import java.io.IOException;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.ScoreDoc;
@@ -59,6 +60,10 @@
return searcher.doc( docId( index ) );
}
+ public Document doc(int index, FieldSelector selector) throws IOException {
+ return searcher.doc( docId( index ), selector );
+ }
+
public ScoreDoc scoreDoc(int index) throws IOException {
if ( index >= totalHits ) {
throw new SearchException("Not a valid ScoreDoc index: " + index);
Modified: search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
===================================================================
---
search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2008-11-13
15:40:37 UTC (rev 15562)
+++
search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2008-11-13
16:02:44 UTC (rev 15563)
@@ -12,6 +12,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
+
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.Transaction;
@@ -39,7 +40,15 @@
Query query = parser.parse( "dept:ITech" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query,
Employee.class );
// Is the 'FullTextQuery.ID' value correct here? Do we want the Lucene internal
document number?
- hibQuery.setProjection( "id", "lastname", "dept",
FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection(
+ "id",
+ "lastname",
+ "dept",
+ FullTextQuery.THIS,
+ FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT,
+ FullTextQuery.ID
+ );
ScrollableResults projections = hibQuery.scroll();
@@ -78,7 +87,9 @@
assertNull( projection );
//cleanup
- for (Object element : s.createQuery( "from " + Employee.class.getName()
).list()) s.delete( element );
+ for ( Object element : s.createQuery( "from " + Employee.class.getName()
).list() ) {
+ s.delete( element );
+ }
tx.commit();
s.close();
}
@@ -97,7 +108,8 @@
hibQuery.setProjection( "id", "lastname", "dept",
FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.ID );
hibQuery.setResultTransformer( new ProjectionToDelimStringResultTransformer() );
- List<String> result = (List<String>) hibQuery.list();
+ @SuppressWarnings( "unchecked" )
+ List<String> result = hibQuery.list();
assertTrue( "incorrect transformation", result.get( 0 ).startsWith(
"1000, Griffin, ITech" ) );
assertTrue( "incorrect transformation", result.get( 1 ).startsWith(
"1002, Jimenez, ITech" ) );
@@ -120,19 +132,29 @@
Query query = parser.parse( "dept:ITech" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query,
Employee.class );
- hibQuery.setProjection( "id", "lastname", "dept",
FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection(
+ "id",
+ "lastname",
+ "dept",
+ FullTextQuery.THIS,
+ FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT,
+ FullTextQuery.ID
+ );
hibQuery.setResultTransformer( new ProjectionToMapResultTransformer() );
List transforms = hibQuery.list();
- Map map = (Map) transforms.get( 1 );
+ Map map = ( Map ) transforms.get( 1 );
assertEquals( "incorrect transformation", "ITech", map.get(
"dept" ) );
assertEquals( "incorrect transformation", 1002, map.get( "id" ) );
assertTrue( "incorrect transformation", map.get( FullTextQuery.DOCUMENT )
instanceof Document );
- assertEquals( "incorrect transformation", "1002", ( (Document)
map.get( FullTextQuery.DOCUMENT ) ).get( "id" ) );
+ assertEquals(
+ "incorrect transformation", "1002", ( ( Document ) map.get(
FullTextQuery.DOCUMENT ) ).get( "id" )
+ );
//cleanup
- for (Object element : s.createQuery( "from " + Employee.class.getName()
).list()) {
+ for ( Object element : s.createQuery( "from " + Employee.class.getName()
).list() ) {
s.delete( element );
}
tx.commit();
@@ -143,10 +165,10 @@
assertEquals( "id incorrect", 1000, projection[0] );
assertEquals( "lastname incorrect", "Griffin", projection[1] );
assertEquals( "dept incorrect", "ITech", projection[2] );
- assertEquals( "THIS incorrect", projection[3], s.get( Employee.class,
(Serializable) projection[0] ) );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (
Serializable ) projection[0] ) );
assertEquals( "SCORE incorrect", 1.0F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 4, ( (Document) projection[5]
).getFields().size() );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5]
).getFields().size() );
assertEquals( "legacy ID incorrect", 1000, projection[6] );
}
@@ -154,10 +176,10 @@
assertEquals( "id incorrect", 1004, projection[0] );
assertEquals( "lastname incorrect", "Whetbrook", projection[1] );
assertEquals( "dept incorrect", "ITech", projection[2] );
- assertEquals( "THIS incorrect", projection[3], s.get( Employee.class,
(Serializable) projection[0] ) );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (
Serializable ) projection[0] ) );
assertEquals( "SCORE incorrect", 1.0F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 4, ( (Document) projection[5]
).getFields().size() );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5]
).getFields().size() );
assertEquals( "legacy ID incorrect", 1004, projection[6] );
}
@@ -165,10 +187,10 @@
assertEquals( "id incorrect", 1003, projection[0] );
assertEquals( "lastname incorrect", "Stejskal", projection[1] );
assertEquals( "dept incorrect", "ITech", projection[2] );
- assertEquals( "THIS incorrect", projection[3], s.get( Employee.class,
(Serializable) projection[0] ) );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (
Serializable ) projection[0] ) );
assertEquals( "SCORE incorrect", 1.0F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 4, ( (Document) projection[5]
).getFields().size() );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5]
).getFields().size() );
assertEquals( "legacy ID incorrect", 1003, projection[6] );
}
@@ -183,25 +205,29 @@
Query query = parser.parse( "dept:ITech" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query,
Employee.class );
- hibQuery.setProjection( "id", "lastname", "dept",
FullTextQuery.THIS, FullTextQuery.SCORE,
- FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection(
+ "id", "lastname", "dept", FullTextQuery.THIS,
FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT, FullTextQuery.ID
+ );
int counter = 0;
- for (Iterator iter = hibQuery.iterate(); iter.hasNext();) {
- Object[] projection = (Object[]) iter.next();
+ for ( Iterator iter = hibQuery.iterate(); iter.hasNext(); ) {
+ Object[] projection = ( Object[] ) iter.next();
assertNotNull( projection );
counter++;
assertEquals( "dept incorrect", "ITech", projection[2] );
- assertEquals( "THIS incorrect", projection[3], s.get( Employee.class,
(Serializable) projection[0] ) );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (
Serializable ) projection[0] ) );
assertEquals( "SCORE incorrect", 1.0F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 4, ( (Document) projection[5]
).getFields().size() );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5]
).getFields().size() );
}
assertEquals( "incorrect number of results returned", 4, counter );
//cleanup
- for (Object element : s.createQuery( "from " + Employee.class.getName()
).list()) s.delete( element );
+ for ( Object element : s.createQuery( "from " + Employee.class.getName()
).list() ) {
+ s.delete( element );
+ }
tx.commit();
s.close();
}
@@ -217,38 +243,42 @@
Query query = parser.parse( "dept:Accounting" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query,
Employee.class );
- hibQuery.setProjection( "id", "lastname", "dept",
FullTextQuery.THIS, FullTextQuery.SCORE,
- FullTextQuery.DOCUMENT, FullTextQuery.ID, FullTextQuery.DOCUMENT_ID );
+ hibQuery.setProjection(
+ "id", "lastname", "dept", FullTextQuery.THIS,
FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT, FullTextQuery.ID, FullTextQuery.DOCUMENT_ID
+ );
List result = hibQuery.list();
assertNotNull( result );
- Object[] projection = (Object[]) result.get( 0 );
+ Object[] projection = ( Object[] ) result.get( 0 );
assertNotNull( projection );
assertEquals( "id incorrect", 1001, projection[0] );
assertEquals( "last name incorrect", "Jackson", projection[1] );
assertEquals( "dept incorrect", "Accounting", projection[2] );
- assertEquals( "THIS incorrect", "Jackson", ( (Employee)
projection[3] ).getLastname() );
- assertEquals( "THIS incorrect", projection[3], s.get( Employee.class,
(Serializable) projection[0] ) );
+ assertEquals( "THIS incorrect", "Jackson", ( ( Employee )
projection[3] ).getLastname() );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (
Serializable ) projection[0] ) );
assertEquals( "SCORE incorrect", 1.9162908F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[5]
).getFields().size() );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[5]
).getFields().size() );
assertEquals( "ID incorrect", 1001, projection[6] );
assertNotNull( "Lucene internal doc id", projection[7] );
// Change the projection order and null one
- hibQuery.setProjection( FullTextQuery.DOCUMENT, FullTextQuery.THIS,
FullTextQuery.SCORE, null, FullTextQuery.ID,
- "id", "lastname", "dept", "hireDate",
FullTextQuery.DOCUMENT_ID );
+ hibQuery.setProjection(
+ FullTextQuery.DOCUMENT, FullTextQuery.THIS, FullTextQuery.SCORE, null,
FullTextQuery.ID,
+ "id", "lastname", "dept", "hireDate",
FullTextQuery.DOCUMENT_ID
+ );
result = hibQuery.list();
assertNotNull( result );
- projection = (Object[]) result.get( 0 );
+ projection = ( Object[] ) result.get( 0 );
assertNotNull( projection );
assertTrue( "DOCUMENT incorrect", projection[0] instanceof Document );
- assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[0]
).getFields().size() );
- assertEquals( "THIS incorrect", projection[1], s.get( Employee.class,
(Serializable) projection[4] ) );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[0]
).getFields().size() );
+ assertEquals( "THIS incorrect", projection[1], s.get( Employee.class, (
Serializable ) projection[4] ) );
assertEquals( "SCORE incorrect", 1.9162908F, projection[2] );
assertNull( "BOOST not removed", projection[3] );
assertEquals( "ID incorrect", 1001, projection[4] );
@@ -259,11 +289,59 @@
assertNotNull( "Lucene internal doc id", projection[9] );
//cleanup
- for (Object element : s.createQuery( "from " + Employee.class.getName()
).list()) s.delete( element );
+ for ( Object element : s.createQuery( "from " + Employee.class.getName()
).list() ) {
+ s.delete( element );
+ }
tx.commit();
s.close();
}
+ public void testNonLoadedFieldOptmization() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( "dept", new StandardAnalyzer() );
+
+ Query query = parser.parse( "dept:Accounting" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query,
Employee.class );
+ hibQuery.setProjection( FullTextQuery.ID, FullTextQuery.DOCUMENT );
+
+ List result = hibQuery.list();
+ assertNotNull( result );
+
+ Object[] projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+ assertEquals( "id field name not projected", 1001, projection[0] );
+ assertEquals(
+ "Document fields should not be lazy on DOCUMENT projection",
+ "Jackson", ( ( Document ) projection[1] ).getField( "lastname"
).stringValue()
+ );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[1]
).getFields().size() );
+
+ // Change the projection order and null one
+ hibQuery.setProjection( FullTextQuery.THIS, FullTextQuery.SCORE, null,
"lastname" );
+
+ result = hibQuery.list();
+ assertNotNull( result );
+
+ projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+
+ assertTrue( "THIS incorrect", projection[0] instanceof Employee );
+ assertEquals( "SCORE incorrect", 1.9162908F, projection[1] );
+ assertEquals( "last name incorrect", "Jackson", projection[3] );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName()
).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
private void prepEmployeeIndex(FullTextSession s) {
Transaction tx = s.beginTransaction();
Employee e1 = new Employee( 1000, "Griffin", "ITech" );
@@ -284,7 +362,11 @@
public void testProjection() throws Exception {
FullTextSession s = Search.getFullTextSession( openSession() );
Transaction tx = s.beginTransaction();
- Book book = new Book( 1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah" );
+ Book book = new Book(
+ 1,
+ "La chute de la petite reine a travers les yeux de Festina",
+ "La chute de la petite reine a travers les yeux de Festina, blahblah"
+ );
s.save( book );
Book book2 = new Book( 2, "Sous les fleurs il n'y a rien", null );
s.save( book2 );
@@ -304,7 +386,7 @@
List result = hibQuery.list();
assertNotNull( result );
assertEquals( "Query with no explicit criteria", 1, result.size() );
- Object[] projection = (Object[]) result.get( 0 );
+ Object[] projection = ( Object[] ) result.get( 0 );
assertEquals( "id", 1, projection[0] );
assertEquals( "summary", "La chute de la petite reine a travers les yeux
de Festina", projection[1] );
assertEquals( "mainAuthor.name (embedded objects)", "Emmanuel",
projection[2] );
@@ -313,10 +395,10 @@
hibQuery.setProjection( "id", "body", "mainAuthor.name"
);
try {
- result = hibQuery.list();
+ hibQuery.list();
fail( "Projecting an unstored field should raise an exception" );
}
- catch (SearchException e) {
+ catch ( SearchException e ) {
//success
}
@@ -340,12 +422,16 @@
hibQuery.setProjection( "id", "summary",
"mainAuthor.name" );
result = hibQuery.list();
assertEquals( 1, result.size() );
- projection = (Object[]) result.get( 0 );
+ projection = ( Object[] ) result.get( 0 );
assertEquals( "mainAuthor.name", null, projection[2] );
//cleanup
- for (Object element : s.createQuery( "from " + Book.class.getName() ).list())
s.delete( element );
- for (Object element : s.createQuery( "from " + Author.class.getName()
).list()) s.delete( element );
+ for ( Object element : s.createQuery( "from " + Book.class.getName() ).list()
) {
+ s.delete( element );
+ }
+ for ( Object element : s.createQuery( "from " + Author.class.getName()
).list() ) {
+ s.delete( element );
+ }
tx.commit();
s.close();
}