[hibernate-commits] Hibernate SVN: r12817 - in trunk/HibernateExt/search/src: java/org/hibernate/search/bridge and 3 other directories.

hibernate-commits at lists.jboss.org
Wed Jul 25 16:30:52 EDT 2007


Author: epbernard
Date: 2007-07-25 16:30:51 -0400 (Wed, 25 Jul 2007)
New Revision: 12817

Modified:
   trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Field.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/bridge/BridgeFactory.java
   trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/Cloud.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/Book.java
   trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQuerySortTest.java
Log:
HSEARCH-45 Support for multiple fields per property
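
For readers skimming the archive: this commit lets one entity property be mapped to several Lucene fields at once through the @Fields container annotation, and lets each @Field carry its own bridge. A minimal usage sketch, mirroring the Book.java test change below (entity, property and field names here are illustrative only):

    import javax.persistence.Entity;
    import javax.persistence.Id;

    import org.hibernate.search.annotations.DocumentId;
    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.Fields;
    import org.hibernate.search.annotations.Index;
    import org.hibernate.search.annotations.Indexed;
    import org.hibernate.search.annotations.Store;

    @Entity
    @Indexed
    public class Article {
        private Integer id;
        private String title;

        @Id
        @DocumentId
        public Integer getId() { return id; }
        public void setId(Integer id) { this.id = id; }

        // One property, two index fields: a tokenized field for full-text
        // queries and an untokenized copy ("title_forSort") usable for sorting.
        @Fields( {
                @Field(index = Index.TOKENIZED, store = Store.YES),
                @Field(name = "title_forSort", index = Index.UN_TOKENIZED, store = Store.YES)
        } )
        public String getTitle() { return title; }
        public void setTitle(String title) { this.title = title; }
    }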

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Field.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Field.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Field.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -41,4 +41,9 @@
 	 */
 	Analyzer analyzer() default @Analyzer;
 
+	/**
+	 * Field bridge used. Default is autowired.
+	 */
+	FieldBridge bridge() default @FieldBridge;
+
 }
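
The new bridge attribute moves the bridge declaration inside @Field, so each field of a property can use its own bridge; left at its default it stays autowired from the property type. A trimmed sketch of the syntax, mirroring the Cloud.java test change further down (the enclosing class is illustrative; TruncateStringBridge is the existing test bridge, whose package is assumed here):

    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.FieldBridge;
    import org.hibernate.search.annotations.Index;
    import org.hibernate.search.annotations.Parameter;
    import org.hibernate.search.annotations.Store;
    import org.hibernate.search.test.bridge.TruncateStringBridge; // assumed package of the test bridge

    public class CloudLikeEntity {
        private String customStringBridge;

        // Bridge declared inline on @Field; the dividedBy parameter is passed
        // to the bridge exactly as with a standalone @FieldBridge annotation.
        @Field(index = Index.TOKENIZED, store = Store.YES,
                bridge = @FieldBridge(impl = TruncateStringBridge.class,
                        params = @Parameter(name = "dividedBy", value = "4")))
        public String getCustomStringBridge() {
            return customStringBridge;
        }

        public void setCustomStringBridge(String value) {
            this.customStringBridge = value;
        }
    }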

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/bridge/BridgeFactory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/bridge/BridgeFactory.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/bridge/BridgeFactory.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -22,7 +22,9 @@
 import org.hibernate.search.bridge.builtin.BooleanBridge;
 import org.hibernate.search.annotations.Resolution;
 import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.Field;
 import org.hibernate.search.SearchException;
+import org.hibernate.search.util.BinderHelper;
 import org.hibernate.annotations.common.reflection.XClass;
 import org.hibernate.annotations.common.reflection.XMember;
 
@@ -82,10 +84,16 @@
 		builtInBridges.put( Date.class.getName(), DATE_MILLISECOND );
 	}
 
-	public static FieldBridge guessType(XMember member) {
+	public static FieldBridge guessType(Field field, XMember member) {
 		FieldBridge bridge = null;
-		org.hibernate.search.annotations.FieldBridge bridgeAnn =
-				member.getAnnotation( org.hibernate.search.annotations.FieldBridge.class );
+		org.hibernate.search.annotations.FieldBridge bridgeAnn;
+		//@Field bridge has priority over @FieldBridge
+		if ( field != null && void.class != field.bridge().impl() ) {
+			bridgeAnn = field.bridge();
+		}
+		else {
+			bridgeAnn = member.getAnnotation( org.hibernate.search.annotations.FieldBridge.class );
+		}
 		if ( bridgeAnn != null ) {
 			Class impl = bridgeAnn.impl();
 			try {
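
The extra Field parameter makes the resolution order explicit: when the @Field annotation declares a non-default bridge, that bridge wins; otherwise guessType falls back to a member-level @FieldBridge and, failing that, to the built-in bridge guessed from the property type, as before. A hedged sketch of what that means at mapping level (both bridge classes are placeholders defined only for this example, and a plain StringBridge implementation is assumed to be an acceptable impl, as with the test bridges):

    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.FieldBridge;
    import org.hibernate.search.bridge.StringBridge;

    public class PriorityExample {
        private String text;

        // Placeholder bridge, used only when @Field declares no bridge of its own.
        public static class MemberLevelBridge implements StringBridge {
            public String objectToString(Object object) {
                return object == null ? null : object.toString();
            }
        }

        // Placeholder bridge declared inline on @Field; it takes priority.
        public static class InlineBridge implements StringBridge {
            public String objectToString(Object object) {
                return object == null ? null : object.toString().trim();
            }
        }

        @FieldBridge(impl = MemberLevelBridge.class)
        @Field(bridge = @FieldBridge(impl = InlineBridge.class))
        public String getText() {
            return text;
        }

        public void setText(String text) {
            this.text = text;
        }
    }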

Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -18,7 +18,6 @@
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.Term;
 import org.hibernate.Hibernate;
-import org.hibernate.proxy.HibernateProxy;
 import org.hibernate.annotations.common.AssertionFailure;
 import org.hibernate.annotations.common.reflection.ReflectionManager;
 import org.hibernate.annotations.common.reflection.XAnnotatedElement;
@@ -26,6 +25,7 @@
 import org.hibernate.annotations.common.reflection.XMember;
 import org.hibernate.annotations.common.reflection.XProperty;
 import org.hibernate.annotations.common.util.ReflectHelper;
+import org.hibernate.proxy.HibernateProxy;
 import org.hibernate.search.SearchException;
 import org.hibernate.search.annotations.Boost;
 import org.hibernate.search.annotations.ContainedIn;
@@ -88,7 +88,7 @@
 		rootPropertiesMetadata.analyzer = defaultAnalyzer;
 		Set<XClass> processedClasses = new HashSet<XClass>();
 		processedClasses.add( clazz );
-		initializeMembers(clazz, rootPropertiesMetadata, true, "", processedClasses );
+		initializeMembers( clazz, rootPropertiesMetadata, true, "", processedClasses );
 		//processedClasses.remove( clazz ); for the sake of completness
 		this.analyzer.setGlobalAnalyzer( rootPropertiesMetadata.analyzer );
 		if ( idKeywordName == null ) {
@@ -125,27 +125,27 @@
 	private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
 								   Set<XClass> processedClasses) {
 		List<XClass> hierarchy = new ArrayList<XClass>();
-		for ( XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass() ) {
+		for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass()) {
 			hierarchy.add( currClass );
 		}
-		for (int index = hierarchy.size() -1 ; index >= 0 ; index--) {
+		for (int index = hierarchy.size() - 1; index >= 0; index--) {
 			XClass currClass = hierarchy.get( index );
 			/**
 			 * Override the default analyzer for the properties if the class hold one
 			 * That's the reason we go down the hierarchy
 			 */
 			Analyzer analyzer = getAnalyzer( currClass );
-			if ( analyzer != null) {
+			if ( analyzer != null ) {
 				propertiesMetadata.analyzer = analyzer;
 			}
 			//rejecting non properties because the object is loaded from Hibernate, so indexing a non property does not make sense
 			List<XProperty> methods = currClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
-			for ( XProperty method : methods ) {
+			for (XProperty method : methods) {
 				initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses );
 			}
 
 			List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD );
-			for ( XProperty field : fields ) {
+			for (XProperty field : fields) {
 				initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses );
 			}
 		}
@@ -163,7 +163,7 @@
 			if ( isRoot && keywordAnn.id() ) {
 				idKeywordName = name;
 				idBoost = getBoost( member );
-				FieldBridge fieldBridge = BridgeFactory.guessType( member );
+				FieldBridge fieldBridge = BridgeFactory.guessType( null, member );
 				if ( fieldBridge instanceof TwoWayFieldBridge ) {
 					idBridge = (TwoWayFieldBridge) fieldBridge;
 				}
@@ -178,7 +178,7 @@
 				setAccessible( member );
 				propertiesMetadata.keywordGetters.add( member );
 				propertiesMetadata.keywordNames.add( name );
-				propertiesMetadata.keywordBridges.add( BridgeFactory.guessType( member ) );
+				propertiesMetadata.keywordBridges.add( BridgeFactory.guessType( null, member ) );
 			}
 		}
 
@@ -187,7 +187,7 @@
 			setAccessible( member );
 			propertiesMetadata.unstoredGetters.add( member );
 			propertiesMetadata.unstoredNames.add( prefix + BinderHelper.getAttributeName( member, unstoredAnn.name() ) );
-			propertiesMetadata.unstoredBridges.add( BridgeFactory.guessType( member ) );
+			propertiesMetadata.unstoredBridges.add( BridgeFactory.guessType( null, member ) );
 		}
 
 		Text textAnn = member.getAnnotation( Text.class );
@@ -195,7 +195,7 @@
 			setAccessible( member );
 			propertiesMetadata.textGetters.add( member );
 			propertiesMetadata.textNames.add( prefix + BinderHelper.getAttributeName( member, textAnn.name() ) );
-			propertiesMetadata.textBridges.add( BridgeFactory.guessType( member ) );
+			propertiesMetadata.textBridges.add( BridgeFactory.guessType( null, member ) );
 		}
 
 		DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
@@ -205,7 +205,7 @@
 						+ idKeywordName + " and " + BinderHelper.getAttributeName( member, documentIdAnn.name() ) );
 			}
 			idKeywordName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
-			FieldBridge fieldBridge = BridgeFactory.guessType( member );
+			FieldBridge fieldBridge = BridgeFactory.guessType( null, member );
 			if ( fieldBridge instanceof TwoWayFieldBridge ) {
 				idBridge = (TwoWayFieldBridge) fieldBridge;
 			}
@@ -217,24 +217,22 @@
 			setAccessible( member );
 			idGetter = member;
 		}
-
-		org.hibernate.search.annotations.Field fieldAnn =
-				member.getAnnotation( org.hibernate.search.annotations.Field.class );
-		if ( fieldAnn != null ) {
-			setAccessible( member );
-			propertiesMetadata.fieldGetters.add( member );
-			String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
-			propertiesMetadata.fieldNames.add( fieldName );
-			propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
-			propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
-			propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( member ) );
-			// Field > property > entity analyzer
-			Analyzer analyzer = getAnalyzer( fieldAnn.analyzer() );
-			if (analyzer == null) analyzer = getAnalyzer( member );
-			if (analyzer == null) analyzer = propertiesMetadata.analyzer;
-			if (analyzer == null) throw new AssertionFailure( "Analizer should not be undefined" );
-			this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+		{
+			org.hibernate.search.annotations.Field fieldAnn =
+					member.getAnnotation( org.hibernate.search.annotations.Field.class );
+			if ( fieldAnn != null ) {
+				bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn );
+			}
 		}
+		{
+			org.hibernate.search.annotations.Fields fieldsAnn =
+					member.getAnnotation( org.hibernate.search.annotations.Fields.class );
+			if ( fieldsAnn != null ) {
+				for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value()) {
+					bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn );
+				}
+			}
+		}
 
 		IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
 		if ( embeddedAnn != null ) {
@@ -247,41 +245,41 @@
 					&& processedClasses.contains( elementClass ) ) {
 				throw new SearchException(
 						"Circular reference. Duplicate use of "
-						+ elementClass.getName()
-						+ " in root entity " + beanClass.getName()
-						+ "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
+								+ elementClass.getName()
+								+ " in root entity " + beanClass.getName()
+								+ "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
 				);
 			}
-			if (level <= maxLevel) {
+			if ( level <= maxLevel ) {
 				processedClasses.add( elementClass ); //push
 
 				setAccessible( member );
 				propertiesMetadata.embeddedGetters.add( member );
 				PropertiesMetadata metadata = new PropertiesMetadata();
-				propertiesMetadata.embeddedPropertiesMetadata.add(metadata);
+				propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
 				metadata.boost = getBoost( member );
 				//property > entity analyzer
 				Analyzer analyzer = getAnalyzer( member );
 				metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
 				String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
-				initializeMembers( elementClass, metadata, false, localPrefix, processedClasses);
+				initializeMembers( elementClass, metadata, false, localPrefix, processedClasses );
 				/**
 				 * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
 				 */
 				if ( member.isArray() ) {
-					propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.ARRAY);
+					propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
 				}
 				else if ( member.isCollection() ) {
 					if ( Map.class.equals( member.getCollectionClass() ) ) {
 						//hum subclasses etc etc??
-						propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.MAP);
+						propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
 					}
 					else {
-						propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.COLLECTION);
+						propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION );
 					}
 				}
 				else {
-					propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.OBJECT);
+					propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
 				}
 
 				processedClasses.remove( elementClass ); //pop
@@ -302,6 +300,22 @@
 		}
 	}
 
+	private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn) {
+		setAccessible( member );
+		propertiesMetadata.fieldGetters.add( member );
+		String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
+		propertiesMetadata.fieldNames.add( fieldName );
+		propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
+		propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
+		propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member ) );
+		// Field > property > entity analyzer
+		Analyzer analyzer = getAnalyzer( fieldAnn.analyzer() );
+		if ( analyzer == null ) analyzer = getAnalyzer( member );
+		if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+		if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
+		this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+	}
+
 	private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member) {
 		String localPrefix = prefix;
 		if ( ".".equals( embeddedAnn.prefix() ) ) {
@@ -315,7 +329,7 @@
 	}
 
 	private Field.Store getStore(Store store) {
-		switch (store) {
+		switch ( store ) {
 			case NO:
 				return Field.Store.NO;
 			case YES:
@@ -328,7 +342,7 @@
 	}
 
 	private Field.Index getIndex(Index index) {
-		switch (index) {
+		switch ( index ) {
 			case NO:
 				return Field.Index.NO;
 			case NO_NORMS:
@@ -364,7 +378,7 @@
 	public void addWorkToQueue(T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
 		Class entityClass = Hibernate.getClass( entity );
 		//TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition 
-		for ( LuceneWork luceneWork : queue) {
+		for (LuceneWork luceneWork : queue) {
 			//whatever the actual work, we should ignore
 			if ( luceneWork.getEntityClass() == entityClass
 					&& luceneWork.getId().equals( id ) ) {//find a way to use Type.equals(x,y)
@@ -379,7 +393,7 @@
 			searchForContainers = true;
 		}
 		else if ( workType == WorkType.DELETE ) {
-			queue.add( new DeleteLuceneWork(id, entityClass) );
+			queue.add( new DeleteLuceneWork( id, entityClass ) );
 		}
 		else if ( workType == WorkType.UPDATE ) {
 			Document doc = getDocument( entity, id );
@@ -390,21 +404,21 @@
 			 * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
 			 * double file opening.
 			 */
-			queue.add( new DeleteLuceneWork(id, entityClass) );
+			queue.add( new DeleteLuceneWork( id, entityClass ) );
 			queue.add( new AddLuceneWork( id, entityClass, doc ) );
 			searchForContainers = true;
 		}
 		else if ( workType == WorkType.INDEX ) {
 			Document doc = getDocument( entity, id );
-			queue.add(new DeleteLuceneWork(id, entityClass) );
+			queue.add( new DeleteLuceneWork( id, entityClass ) );
 			LuceneWork work = new AddLuceneWork( id, entityClass, doc );
-			work.setBatch(true);
-			queue.add(work);
+			work.setBatch( true );
+			queue.add( work );
 			searchForContainers = true;
-		}		
-		
+		}
+
 		else {
-			throw new AssertionFailure("Unknown WorkType: " + workType);
+			throw new AssertionFailure( "Unknown WorkType: " + workType );
 		}
 
 		/**
@@ -412,23 +426,23 @@
 		 * have to be updated)
 		 * When the internal object is changed, we apply the {Add|Update}Work on containedIns
 		 */
-		if (searchForContainers) {
-			processContainedIn(entity, queue, rootPropertiesMetadata, searchFactoryImplementor);
+		if ( searchForContainers ) {
+			processContainedIn( entity, queue, rootPropertiesMetadata, searchFactoryImplementor );
 		}
 	}
 
 	private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
-		for ( int i = 0; i < metadata.containedInGetters.size(); i++ ) {
+		for (int i = 0; i < metadata.containedInGetters.size(); i++) {
 			XMember member = metadata.containedInGetters.get( i );
 			Object value = getMemberValue( instance, member );
-			if (value == null) continue;
+			if ( value == null ) continue;
 
 			if ( member.isArray() ) {
-				for ( Object arrayValue : (Object[]) value ) {
+				for (Object arrayValue : (Object[]) value) {
 					//highly inneficient but safe wrt the actual targeted class
 					Class valueClass = Hibernate.getClass( arrayValue );
 					DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
-					if (builder == null) continue;
+					if ( builder == null ) continue;
 					processContainedInValue( arrayValue, queue, valueClass, builder, searchFactoryImplementor );
 				}
 			}
@@ -441,18 +455,18 @@
 				else {
 					collection = (Collection) value;
 				}
-				for ( Object collectionValue : collection ) {
+				for (Object collectionValue : collection) {
 					//highly inneficient but safe wrt the actual targeted class
 					Class valueClass = Hibernate.getClass( collectionValue );
 					DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
-					if (builder == null) continue;
+					if ( builder == null ) continue;
 					processContainedInValue( collectionValue, queue, valueClass, builder, searchFactoryImplementor );
 				}
 			}
 			else {
 				Class valueClass = Hibernate.getClass( value );
 				DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
-				if (builder == null) continue;
+				if ( builder == null ) continue;
 				processContainedInValue( value, queue, valueClass, builder, searchFactoryImplementor );
 			}
 		}
@@ -483,10 +497,10 @@
 	}
 
 	private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata) {
-		if (instance == null) return;
+		if ( instance == null ) return;
 		//needed for field access: I cannot work in the proxied version
 		Object unproxiedInstance = unproxy( instance );
-		for ( int i = 0; i < propertiesMetadata.keywordNames.size(); i++ ) {
+		for (int i = 0; i < propertiesMetadata.keywordNames.size(); i++) {
 			XMember member = propertiesMetadata.keywordGetters.get( i );
 			Object value = getMemberValue( unproxiedInstance, member );
 			propertiesMetadata.keywordBridges.get( i ).set(
@@ -494,7 +508,7 @@
 					Field.Index.UN_TOKENIZED, getBoost( member )
 			);
 		}
-		for ( int i = 0; i < propertiesMetadata.textNames.size(); i++ ) {
+		for (int i = 0; i < propertiesMetadata.textNames.size(); i++) {
 			XMember member = propertiesMetadata.textGetters.get( i );
 			Object value = getMemberValue( unproxiedInstance, member );
 			propertiesMetadata.textBridges.get( i ).set(
@@ -502,7 +516,7 @@
 					Field.Index.TOKENIZED, getBoost( member )
 			);
 		}
-		for ( int i = 0; i < propertiesMetadata.unstoredNames.size(); i++ ) {
+		for (int i = 0; i < propertiesMetadata.unstoredNames.size(); i++) {
 			XMember member = propertiesMetadata.unstoredGetters.get( i );
 			Object value = getMemberValue( unproxiedInstance, member );
 			propertiesMetadata.unstoredBridges.get( i ).set(
@@ -510,7 +524,7 @@
 					Field.Index.TOKENIZED, getBoost( member )
 			);
 		}
-		for ( int i = 0; i < propertiesMetadata.fieldNames.size(); i++ ) {
+		for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++) {
 			XMember member = propertiesMetadata.fieldGetters.get( i );
 			Object value = getMemberValue( unproxiedInstance, member );
 			propertiesMetadata.fieldBridges.get( i ).set(
@@ -518,26 +532,26 @@
 					propertiesMetadata.fieldIndex.get( i ), getBoost( member )
 			);
 		}
-		for ( int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++ ) {
+		for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++) {
 			XMember member = propertiesMetadata.embeddedGetters.get( i );
 			Object value = getMemberValue( unproxiedInstance, member );
 			//TODO handle boost at embedded level: already stored in propertiesMedatada.boost
 
-			if (value == null) continue;
+			if ( value == null ) continue;
 			PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get( i );
-			switch( propertiesMetadata.embeddedContainers.get(i) ) {
+			switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
 				case ARRAY:
-					for ( Object arrayValue : (Object[]) value ) {
+					for (Object arrayValue : (Object[]) value) {
 						buildDocumentFields( arrayValue, doc, embeddedMetadata );
 					}
 					break;
 				case COLLECTION:
-					for ( Object collectionValue : (Collection) value ) {
+					for (Object collectionValue : (Collection) value) {
 						buildDocumentFields( collectionValue, doc, embeddedMetadata );
 					}
 					break;
 				case MAP:
-					for ( Object collectionValue : ( (Map) value ).values() ) {
+					for (Object collectionValue : ( (Map) value ).values()) {
 						buildDocumentFields( collectionValue, doc, embeddedMetadata );
 					}
 					break;
@@ -545,8 +559,8 @@
 					buildDocumentFields( value, doc, embeddedMetadata );
 					break;
 				default:
-					throw new AssertionFailure("Unknown embedded container: "
-							+ propertiesMetadata.embeddedContainers.get(i) );
+					throw new AssertionFailure( "Unknown embedded container: "
+							+ propertiesMetadata.embeddedContainers.get( i ) );
 			}
 		}
 	}
@@ -554,7 +568,7 @@
 	private Object unproxy(Object value) {
 		//FIXME this service should be part of Core?
 		if ( value instanceof HibernateProxy ) {
-			value = ( ( HibernateProxy ) value ).getHibernateLazyInitializer()
+			value = ( (HibernateProxy) value ).getHibernateLazyInitializer()
 					.getImplementation();
 		}
 		return value;
@@ -609,7 +623,7 @@
 		Object[] result = new Object[fieldNbr];
 
 		if ( builder.idKeywordName != null ) {
-			populateResult(builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document);
+			populateResult( builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document );
 		}
 
 		final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
@@ -619,20 +633,20 @@
 
 	private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document) {
 		final int nbrFoEntityFields = metadata.fieldNames.size();
-		for (int index = 0 ; index < nbrFoEntityFields; index++ ) {
-			populateResult( metadata.fieldNames.get(index),
-					metadata.fieldBridges.get(index),
-					metadata.fieldStore.get(index),
+		for (int index = 0; index < nbrFoEntityFields; index++) {
+			populateResult( metadata.fieldNames.get( index ),
+					metadata.fieldBridges.get( index ),
+					metadata.fieldStore.get( index ),
 					fields,
 					result,
 					document
 			);
 		}
 		final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
-		for (int index = 0 ; index < nbrOfEmbeddedObjects ; index++) {
+		for (int index = 0; index < nbrOfEmbeddedObjects; index++) {
 			//there is nothing we can do for collections
-			if ( metadata.embeddedContainers.get(index) == PropertiesMetadata.Container.OBJECT) {
-				processFieldsForProjection( metadata.embeddedPropertiesMetadata.get(index), fields, result, document );
+			if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT ) {
+				processFieldsForProjection( metadata.embeddedPropertiesMetadata.get( index ), fields, result, document );
 			}
 		}
 	}
@@ -640,7 +654,7 @@
 	private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
 									   String[] fields, Object[] result, Document document) {
 		int matchingPosition = getFieldPosition( fields, fieldName );
-		if (matchingPosition != -1) {
+		if ( matchingPosition != -1 ) {
 			//TODO make use of an isTwoWay() method
 			if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom( fieldBridge.getClass() ) ) {
 				result[matchingPosition] = ( (TwoWayFieldBridge) fieldBridge ).get( fieldName, document );
@@ -650,7 +664,7 @@
 			}
 			else {
 				if ( store == Field.Store.NO ) {
-					throw new SearchException("Projecting an unstored field: " + fieldName);
+					throw new SearchException( "Projecting an unstored field: " + fieldName );
 				}
 				else {
 					throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass() );
@@ -661,7 +675,7 @@
 
 	private static int getFieldPosition(String[] fields, String fieldName) {
 		int fieldNbr = fields.length;
-		for (int index = 0 ; index < fieldNbr ; index++) {
+		for (int index = 0; index < fieldNbr; index++) {
 			if ( fieldName.equals( fields[index] ) ) return index;
 		}
 		return -1;
@@ -672,7 +686,7 @@
 		Class plainClass = reflectionManager.toClass( beanClass );
 		Set<Class> tempMappedSubclasses = new HashSet<Class>();
 		//together with the caller this creates a o(2), but I think it's still faster than create the up hierarchy for each class
-		for ( Class currentClass : indexedClasses ) {
+		for (Class currentClass : indexedClasses) {
 			if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add( currentClass );
 		}
 		mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
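
Every @Field, whether standalone or listed inside @Fields, now goes through the same bindFieldAnnotation path, so each entry gets its own field name, store and index settings, bridge, and a scoped analyzer resolved field first, then property, then entity. A hedged sketch of per-field analyzers, assuming @Analyzer(impl = ...) selects a Lucene analyzer class as it does elsewhere in Hibernate Search (the entity and the analyzer choices are illustrative):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.hibernate.search.annotations.Analyzer;
    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.Fields;
    import org.hibernate.search.annotations.Index;
    import org.hibernate.search.annotations.Store;

    public class Review {
        private String body;

        // Two index fields from one property; each analyzer is registered
        // against its own field name by DocumentBuilder.bindFieldAnnotation().
        @Fields( {
                @Field(name = "body", index = Index.TOKENIZED, store = Store.NO,
                        analyzer = @Analyzer(impl = StandardAnalyzer.class)),
                @Field(name = "body_simple", index = Index.TOKENIZED, store = Store.NO,
                        analyzer = @Analyzer(impl = SimpleAnalyzer.class))
        } )
        public String getBody() {
            return body;
        }

        public void setBody(String body) {
            this.body = body;
        }
    }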

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/Cloud.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/Cloud.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/bridge/Cloud.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -56,8 +56,9 @@
         this.customFieldBridge = customFieldBridge;
     }
 
-    @Text
-    @FieldBridge(impl = TruncateStringBridge.class, params = @Parameter( name="dividedBy", value="4" ) )
+    @Field(index=Index.TOKENIZED, store=Store.YES,
+			bridge = @FieldBridge(impl = TruncateStringBridge.class, params = @Parameter( name="dividedBy", value="4" ) )
+	)
     public String getCustomStringBridge() {
         return customStringBridge;
     }

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/Book.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/Book.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/Book.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -16,6 +16,7 @@
 import org.hibernate.search.annotations.Index;
 import org.hibernate.search.annotations.Store;
 import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Fields;
 
 /**
  * @author Emmanuel Bernard
@@ -77,7 +78,10 @@
 		this.id = id;
 	}
 
-	@Field(index = Index.TOKENIZED, store = Store.YES)
+	@Fields( {
+			@Field(index = Index.TOKENIZED, store = Store.YES),
+			@Field(name = "summary_forSort", index = Index.UN_TOKENIZED, store = Store.YES)
+			} )
 	public String getSummary() {
 		return summary;
 	}

Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQuerySortTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQuerySortTest.java	2007-07-25 20:15:50 UTC (rev 12816)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/LuceneQuerySortTest.java	2007-07-25 20:30:51 UTC (rev 12817)
@@ -66,8 +66,29 @@
 			id--;
 		}
 
+		// order by summary
+		query = parser.parse( "summary:lucene OR summary:action" );
+		hibQuery = s.createFullTextQuery( query, Book.class );
+		sort = new Sort( new SortField( "summary_forSort", false ) ); //ASC
+		hibQuery.setSort( sort );
+		result = hibQuery.list();
+		assertNotNull( result );
+		assertEquals( "Wrong number of test results.", 4, result.size() );
+		assertEquals( "Groovy in Action", result.get( 0 ).getSummary() );
 
+		// order by summary backwards
+		query = parser.parse( "summary:lucene OR summary:action" );
+		hibQuery = s.createFullTextQuery( query, Book.class );
+		sort = new Sort( new SortField( "summary_forSort", true ) ); //DESC
+		hibQuery.setSort( sort );
+		result = hibQuery.list();
+		assertNotNull( result );
+		assertEquals( "Wrong number of test results.", 4, result.size() );
+		assertEquals( "Hibernate & Lucene", result.get( 0 ).getSummary() );
+
 		tx.commit();
+
+		deleteTestBooks(s);
 		s.close();
 	}
 
@@ -86,15 +107,22 @@
 		s.save(book);
 		book = new Book(3, "Hibernate & Lucene", "This is a test book.");
 		s.save(book);
+		book = new Book(4, "Groovy in Action", "The bible of Groovy");
+		s.save(book);
 		tx.commit();
 		s.clear();
 	}
 
+	private void deleteTestBooks(FullTextSession s) {
+		Transaction tx = s.beginTransaction();
+		s.createQuery( "delete " + Book.class.getName() ).executeUpdate();
+		tx.commit();
+		s.clear();
+	}
+
 	protected Class[] getMappings() {
 		return new Class[] {
 				Book.class,
-				AlternateBook.class,
-				Clock.class,
 				Author.class
 		};
 	}



