[hibernate-commits] Hibernate SVN: r14912 - in search/branches/jboss_cache_integration/src: java/org/hibernate/search/annotations and 10 other directories.
hibernate-commits at lists.jboss.org
hibernate-commits at lists.jboss.org
Thu Jul 10 06:37:16 EDT 2008
Author: navssurtani
Date: 2008-07-10 06:37:16 -0400 (Thu, 10 Jul 2008)
New Revision: 14912
Added:
search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/CachingWrapperFilter.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/CachingWrapperFilter.java
Modified:
search/branches/jboss_cache_integration/src/java/org/hibernate/search/ProjectionConstants.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/Search.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/FullTextFilterDef.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/TermVector.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentExtractor.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/FilterDef.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/QueryLoader.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterCachingStrategy.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterKey.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/InitContext.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/Search.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/reader/CacheableMultiReader.java
search/branches/jboss_cache_integration/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
search/branches/jboss_cache_integration/src/test/log4j.properties
Log:
Re-patched with trunk
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/ProjectionConstants.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/ProjectionConstants.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/ProjectionConstants.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -21,6 +21,7 @@
public String SCORE = "__HSearch_Score";
/**
* The boost value of the Lucene document.
+ * @deprecated always return 1
*/
public String BOOST = "__HSearch_Boost";
/**
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/Search.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/Search.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/Search.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -5,15 +5,17 @@
import org.hibernate.search.impl.FullTextSessionImpl;
/**
- * Helper class to get a FullTextSession out of a regular session
+ * Helper class to get a FullTextSession out of a regular session.
+ *
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public final class Search {
private Search() {
}
- public static FullTextSession createFullTextSession(Session session) {
+ public static FullTextSession getFullTextSession(Session session) {
if (session instanceof FullTextSessionImpl) {
return (FullTextSession) session;
}
@@ -21,4 +23,12 @@
return new FullTextSessionImpl(session);
}
}
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link #getFullTextSession(Session)}
+ */
+ @Deprecated
+ public static FullTextSession createFullTextSession(Session session) {
+ return getFullTextSession(session);
+ }
}
\ No newline at end of file
Added: search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/CachingWrapperFilter.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/CachingWrapperFilter.java (rev 0)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/CachingWrapperFilter.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -0,0 +1,26 @@
+// $Id:$
+package org.hibernate.search.annotations;
+
+/**
+ * Defines the strategy for using <code>CachingWrapperFilter</code>
+ *
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.filter.CachingWrapperFilter
+ */
+public enum CachingWrapperFilter {
+ /**
+ * Use a <code>CachingWrapperFilter</code> depending on the value of
+ * @see FullTextFilterDef#cache()
+ */
+ AUTOMATIC,
+
+ /**
+ * Wrap the filter around a <code>CachingWrapperFilter</code>.
+ */
+ YES,
+
+ /**
+ * Do not use a <code>CachingWrapperFilter</code>.
+ */
+ NO;
+}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/FullTextFilterDef.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/FullTextFilterDef.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/FullTextFilterDef.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -19,7 +19,7 @@
@Documented
public @interface FullTextFilterDef {
/**
- * Filter name. Must be unique accross all mappings for a given persistence unit
+ * Filter name. Must be unique across all mappings for a given persistence unit
*/
String name();
@@ -31,10 +31,16 @@
* If the filter accept parameters, an @Key method must be present as well
*
*/
+ @SuppressWarnings("unchecked")
Class impl();
/**
* Enable caching for this filter (default true).
*/
boolean cache() default true;
+
+ /**
+ * Determines whether the filter should be wrapped around a <code>CachingWrapperFilter</code>.
+ */
+ CachingWrapperFilter useCachingWrapperFilter() default CachingWrapperFilter.AUTOMATIC;
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/TermVector.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/TermVector.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/annotations/TermVector.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -1,4 +1,4 @@
-//$
+// $Id:$
package org.hibernate.search.annotations;
/**
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -138,7 +138,7 @@
Hibernate.getClass( work.getEntity() );
DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entityClass );
if ( builder == null ) continue; //or exception?
- builder.addWorkToQueue(entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
+ builder.addWorkToQueue(entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
}
}
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -55,54 +55,51 @@
* @author Richard Hallier
* @author Hardy Ferentschik
*/
-public class DocumentBuilder<T>
-{
- private static final Logger log = LoggerFactory.getLogger(DocumentBuilder.class);
+public class DocumentBuilder<T> {
+ private static final Logger log = LoggerFactory.getLogger( DocumentBuilder.class );
- private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
- private final XClass beanClass;
- private final DirectoryProvider[] directoryProviders;
- private final IndexShardingStrategy shardingStrategy;
- private String idKeywordName;
- private XMember idGetter;
- private Float idBoost;
- public static final String CLASS_FIELDNAME = "_hibernate_class";
- private TwoWayFieldBridge idBridge;
- private Set<Class> mappedSubclasses = new HashSet<Class>();
- private ReflectionManager reflectionManager;
- private int level = 0;
- private int maxLevel = Integer.MAX_VALUE;
- private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
- private Similarity similarity;
- private boolean isRoot;
- //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- private boolean safeFromTupleId;
+ private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
+ private final XClass beanClass;
+ private final DirectoryProvider[] directoryProviders;
+ private final IndexShardingStrategy shardingStrategy;
+ private String idKeywordName;
+ private XMember idGetter;
+ private Float idBoost;
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+ private TwoWayFieldBridge idBridge;
+ private Set<Class> mappedSubclasses = new HashSet<Class>();
+ private ReflectionManager reflectionManager;
+ private int level = 0;
+ private int maxLevel = Integer.MAX_VALUE;
+ private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
+ private Similarity similarity;
+ private boolean isRoot;
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
+ private boolean safeFromTupleId;
private boolean idProvided = false;
- public boolean isRoot()
- {
- return isRoot;
- }
+ public boolean isRoot() {
+ return isRoot;
+ }
- public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
- IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager)
- {
- this.beanClass = clazz;
- this.directoryProviders = directoryProviders;
- this.shardingStrategy = shardingStrategy;
- //FIXME get rid of it when boost is stored?
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
+ public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
+ IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
+ this.beanClass = clazz;
+ this.directoryProviders = directoryProviders;
+ this.shardingStrategy = shardingStrategy;
+ //FIXME get rid of it when boost is stored?
+ this.reflectionManager = reflectionManager;
+ this.similarity = context.getDefaultSimilarity();
- if (clazz == null) throw new AssertionFailure("Unable to build a DocumentBuilder with a null class");
- rootPropertiesMetadata.boost = getBoost(clazz);
- rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
- Set<XClass> processedClasses = new HashSet<XClass>();
- processedClasses.add(clazz);
- initializeMembers(clazz, rootPropertiesMetadata, true, "", processedClasses, context);
- //processedClasses.remove( clazz ); for the sake of completness
- this.analyzer.setGlobalAnalyzer(rootPropertiesMetadata.analyzer);
- if (idKeywordName == null)
+ if ( clazz == null ) throw new AssertionFailure( "Unable to build a DocumentBuilder with a null class" );
+ rootPropertiesMetadata.boost = getBoost( clazz );
+ rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
+ Set<XClass> processedClasses = new HashSet<XClass>();
+ processedClasses.add( clazz );
+ initializeMembers( clazz, rootPropertiesMetadata, true, "", processedClasses, context );
+ //processedClasses.remove( clazz ); for the sake of completeness
+ this.analyzer.setGlobalAnalyzer( rootPropertiesMetadata.analyzer );
+ if ( idKeywordName == null )
{
// if no DocumentId then check if we have a ProvidedId instead
@@ -128,12 +125,11 @@
}
}
- private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context)
- {
- org.hibernate.search.annotations.Analyzer analyzerAnn =
- annotatedElement.getAnnotation(org.hibernate.search.annotations.Analyzer.class);
- return getAnalyzer(analyzerAnn, context);
- }
+ private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
+ org.hibernate.search.annotations.Analyzer analyzerAnn =
+ annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
+ return getAnalyzer( analyzerAnn, context );
+ }
private TwoWayFieldBridge getProvidedIdBridge()
{
@@ -141,872 +137,738 @@
}
- private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context)
- {
- Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
- if (analyzerClass == void.class)
- {
- String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
- if (StringHelper.isEmpty(definition))
- {
- return null;
- }
- else
- {
+ private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context) {
+ Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
+ if ( analyzerClass == void.class ) {
+ String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
+ if ( StringHelper.isEmpty( definition ) ) {
+ return null;
+ }
+ else {
- return context.buildLazyAnalyzer(definition);
- }
- }
- else
- {
- try
- {
- return (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e)
- {
- throw new SearchException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
- );
- }
- catch (Exception e)
- {
- throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e);
- }
- }
- }
+ return context.buildLazyAnalyzer( definition );
+ }
+ }
+ else {
+ try {
+ return (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new SearchException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
+ );
+ }
+ catch (Exception e) {
+ throw new SearchException( "Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e );
+ }
+ }
+ }
- private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
- Set<XClass> processedClasses, InitContext context)
- {
- List<XClass> hierarchy = new ArrayList<XClass>();
- for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass())
- {
- hierarchy.add(currClass);
- }
- Class similarityClass = null;
- for (int index = hierarchy.size() - 1; index >= 0; index--)
- {
- XClass currClass = hierarchy.get(index);
- /**
- * Override the default analyzer for the properties if the class hold one
- * That's the reason we go down the hierarchy
- */
- Analyzer analyzer = getAnalyzer(currClass, context);
+ private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
+ Set<XClass> processedClasses, InitContext context) {
+ List<XClass> hierarchy = new ArrayList<XClass>();
+ for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass()) {
+ hierarchy.add( currClass );
+ }
+ Class similarityClass = null;
+ for (int index = hierarchy.size() - 1; index >= 0; index--) {
+ XClass currClass = hierarchy.get( index );
+ /**
+ * Override the default analyzer for the properties if the class holds one
+ * That's the reason we go down the hierarchy
+ */
+ Analyzer analyzer = getAnalyzer( currClass, context );
- if (analyzer != null)
- {
- propertiesMetadata.analyzer = analyzer;
- }
- getAnalyzerDefs(currClass, context);
- // Check for any ClassBridges annotation.
- ClassBridges classBridgesAnn = currClass.getAnnotation(ClassBridges.class);
- if (classBridgesAnn != null)
- {
- ClassBridge[] cbs = classBridgesAnn.value();
- for (ClassBridge cb : cbs)
- {
- bindClassAnnotation(prefix, propertiesMetadata, cb, context);
- }
- }
+ if ( analyzer != null ) {
+ propertiesMetadata.analyzer = analyzer;
+ }
+ getAnalyzerDefs(currClass, context);
+ // Check for any ClassBridges annotation.
+ ClassBridges classBridgesAnn = currClass.getAnnotation( ClassBridges.class );
+ if ( classBridgesAnn != null ) {
+ ClassBridge[] cbs = classBridgesAnn.value();
+ for (ClassBridge cb : cbs) {
+ bindClassAnnotation( prefix, propertiesMetadata, cb, context );
+ }
+ }
- // Check for any ClassBridge style of annotations.
- ClassBridge classBridgeAnn = currClass.getAnnotation(ClassBridge.class);
- if (classBridgeAnn != null)
- {
- bindClassAnnotation(prefix, propertiesMetadata, classBridgeAnn, context);
- }
+ // Check for any ClassBridge style of annotations.
+ ClassBridge classBridgeAnn = currClass.getAnnotation( ClassBridge.class );
+ if ( classBridgeAnn != null ) {
+ bindClassAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
+ }
- //Get similarity
- //TODO: similarity form @IndexedEmbedded are not taken care of. Exception??
- if (isRoot)
- {
- org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation(org.hibernate.search.annotations.Similarity.class);
- if (similarityAnn != null)
- {
- if (similarityClass != null)
- {
- throw new SearchException("Multiple Similarities defined in the same class hierarchy: " + beanClass.getName());
- }
- similarityClass = similarityAnn.impl();
- }
- }
+ //Get similarity
+ //TODO: similarity from @IndexedEmbedded are not taken care of. Exception??
+ if ( isRoot ) {
+ org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation( org.hibernate.search.annotations.Similarity.class );
+ if ( similarityAnn != null ) {
+ if ( similarityClass != null ) {
+ throw new SearchException( "Multiple Similarities defined in the same class hierarchy: " + beanClass.getName() );
+ }
+ similarityClass = similarityAnn.impl();
+ }
+ }
- //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
- // so indexing a non property does not make sense
- List<XProperty> methods = currClass.getDeclaredProperties(XClass.ACCESS_PROPERTY);
- for (XProperty method : methods)
- {
- initializeMember(method, propertiesMetadata, isRoot, prefix, processedClasses, context);
- }
+ //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
+ // so indexing a non property does not make sense
+ List<XProperty> methods = currClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
+ for (XProperty method : methods) {
+ initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses, context );
+ }
- List<XProperty> fields = currClass.getDeclaredProperties(XClass.ACCESS_FIELD);
- for (XProperty field : fields)
- {
- initializeMember(field, propertiesMetadata, isRoot, prefix, processedClasses, context);
- }
- }
- if (isRoot && similarityClass != null)
- {
- try
- {
- similarity = (Similarity) similarityClass.newInstance();
- }
- catch (Exception e)
- {
- log.error("Exception attempting to instantiate Similarity '{}' set for {}",
- similarityClass.getName(), beanClass.getName());
- }
- }
- }
+ List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD );
+ for (XProperty field : fields) {
+ initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses, context );
+ }
+ }
+ if ( isRoot && similarityClass != null ) {
+ try {
+ similarity = (Similarity) similarityClass.newInstance();
+ }
+ catch (Exception e) {
+ log.error( "Exception attempting to instantiate Similarity '{}' set for {}",
+ similarityClass.getName(), beanClass.getName() );
+ }
+ }
+ }
- private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context)
- {
- AnalyzerDefs defs = annotatedElement.getAnnotation(AnalyzerDefs.class);
- if (defs != null)
- {
- for (AnalyzerDef def : defs.value())
- {
- context.addAnalyzerDef(def);
- }
- }
- AnalyzerDef def = annotatedElement.getAnnotation(AnalyzerDef.class);
- context.addAnalyzerDef(def);
- }
+ private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context) {
+ AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
+ if ( defs != null ) {
+ for (AnalyzerDef def : defs.value()) {
+ context.addAnalyzerDef( def );
+ }
+ }
+ AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
+ context.addAnalyzerDef( def );
+ }
- public String getIdentifierName()
- {
- return idGetter.getName();
- }
+ public String getIdentifierName() {
+ return idGetter.getName();
+ }
- public Similarity getSimilarity()
- {
- return similarity;
- }
+ public Similarity getSimilarity() {
+ return similarity;
+ }
- private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
- String prefix, Set<XClass> processedClasses, InitContext context)
- {
+ private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
+ String prefix, Set<XClass> processedClasses, InitContext context) {
- DocumentId documentIdAnn = member.getAnnotation(DocumentId.class);
- if (documentIdAnn != null)
- {
- if (isRoot)
- {
- if (idKeywordName != null)
- {
- throw new AssertionFailure("Two document id assigned: "
- + idKeywordName + " and " + BinderHelper.getAttributeName(member, documentIdAnn.name()));
- }
- idKeywordName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
- FieldBridge fieldBridge = BridgeFactory.guessType(null, member, reflectionManager);
- if (fieldBridge instanceof TwoWayFieldBridge)
- {
- idBridge = (TwoWayFieldBridge) fieldBridge;
- }
- else
- {
- throw new SearchException(
- "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName());
- }
- idBoost = getBoost(member);
- setAccessible(member);
- idGetter = member;
- }
- else
- {
- //component should index their document id
- setAccessible(member);
- propertiesMetadata.fieldGetters.add(member);
- String fieldName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
- propertiesMetadata.fieldNames.add(fieldName);
- propertiesMetadata.fieldStore.add(getStore(Store.YES));
- propertiesMetadata.fieldIndex.add(getIndex(Index.UN_TOKENIZED));
- propertiesMetadata.fieldTermVectors.add(getTermVector(TermVector.NO));
- propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(null, member, reflectionManager));
- // property > entity analyzer (no field analyzer)
- Analyzer analyzer = getAnalyzer(member, context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
- }
- {
- org.hibernate.search.annotations.Field fieldAnn =
- member.getAnnotation(org.hibernate.search.annotations.Field.class);
- if (fieldAnn != null)
- {
- bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
- }
- }
- {
- org.hibernate.search.annotations.Fields fieldsAnn =
- member.getAnnotation(org.hibernate.search.annotations.Fields.class);
- if (fieldsAnn != null)
- {
- for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value())
- {
- bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
- }
- }
- }
- getAnalyzerDefs(member, context);
+ DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ if ( isRoot ) {
+ if ( idKeywordName != null ) {
+ throw new AssertionFailure( "Two document id assigned: "
+ + idKeywordName + " and " + BinderHelper.getAttributeName( member, documentIdAnn.name() ) );
+ }
+ idKeywordName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
+ FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager );
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = (TwoWayFieldBridge) fieldBridge;
+ }
+ else {
+ throw new SearchException(
+ "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName() );
+ }
+ idBoost = getBoost( member );
+ setAccessible( member );
+ idGetter = member;
+ }
+ else {
+ //component should index their document id
+ setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
+ propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member, reflectionManager ) );
+ // property > entity analyzer (no field analyzer)
+ Analyzer analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+ {
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Field.class );
+ if ( fieldAnn != null ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ {
+ org.hibernate.search.annotations.Fields fieldsAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Fields.class );
+ if ( fieldsAnn != null ) {
+ for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value()) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ }
+ getAnalyzerDefs( member, context );
- IndexedEmbedded embeddedAnn = member.getAnnotation(IndexedEmbedded.class);
- if (embeddedAnn != null)
- {
- int oldMaxLevel = maxLevel;
- int potentialLevel = embeddedAnn.depth() + level;
- if (potentialLevel < 0)
- {
- potentialLevel = Integer.MAX_VALUE;
- }
- maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
- level++;
+ IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
+ if ( embeddedAnn != null ) {
+ int oldMaxLevel = maxLevel;
+ int potentialLevel = embeddedAnn.depth() + level;
+ if ( potentialLevel < 0 ) {
+ potentialLevel = Integer.MAX_VALUE;
+ }
+ maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
+ level++;
- XClass elementClass;
- if (void.class == embeddedAnn.targetElement())
- {
- elementClass = member.getElementClass();
- }
- else
- {
- elementClass = reflectionManager.toXClass(embeddedAnn.targetElement());
- }
- if (maxLevel == Integer.MAX_VALUE //infinite
- && processedClasses.contains(elementClass))
- {
- throw new SearchException(
- "Circular reference. Duplicate use of "
- + elementClass.getName()
- + " in root entity " + beanClass.getName()
- + "#" + buildEmbeddedPrefix(prefix, embeddedAnn, member)
- );
- }
- if (level <= maxLevel)
- {
- processedClasses.add(elementClass); //push
+ XClass elementClass;
+ if ( void.class == embeddedAnn.targetElement() ) {
+ elementClass = member.getElementClass();
+ }
+ else {
+ elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
+ }
+ if ( maxLevel == Integer.MAX_VALUE //infinite
+ && processedClasses.contains( elementClass ) ) {
+ throw new SearchException(
+ "Circular reference. Duplicate use of "
+ + elementClass.getName()
+ + " in root entity " + beanClass.getName()
+ + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
+ );
+ }
+ if ( level <= maxLevel ) {
+ processedClasses.add( elementClass ); //push
- setAccessible(member);
- propertiesMetadata.embeddedGetters.add(member);
- PropertiesMetadata metadata = new PropertiesMetadata();
- propertiesMetadata.embeddedPropertiesMetadata.add(metadata);
- metadata.boost = getBoost(member);
- //property > entity analyzer
- Analyzer analyzer = getAnalyzer(member, context);
- metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
- String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
- initializeMembers(elementClass, metadata, false, localPrefix, processedClasses, context);
- /**
- * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
- */
- if (member.isArray())
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.ARRAY);
- }
- else if (member.isCollection())
- {
- if (Map.class.equals(member.getCollectionClass()))
- {
- //hum subclasses etc etc??
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.MAP);
- }
- else
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.COLLECTION);
- }
- }
- else
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.OBJECT);
- }
+ setAccessible( member );
+ propertiesMetadata.embeddedGetters.add( member );
+ PropertiesMetadata metadata = new PropertiesMetadata();
+ propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
+ metadata.boost = getBoost( member );
+ //property > entity analyzer
+ Analyzer analyzer = getAnalyzer( member, context );
+ metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ initializeMembers( elementClass, metadata, false, localPrefix, processedClasses, context );
+ /**
+ * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
+ */
+ if ( member.isArray() ) {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
+ }
+ else if ( member.isCollection() ) {
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum subclasses etc etc??
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION );
+ }
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
+ }
- processedClasses.remove(elementClass); //pop
- }
- else if (log.isTraceEnabled())
- {
- String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
- log.trace("depth reached, ignoring {}", localPrefix);
- }
+ processedClasses.remove( elementClass ); //pop
+ }
+ else if ( log.isTraceEnabled() ) {
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ log.trace( "depth reached, ignoring {}", localPrefix );
+ }
- level--;
- maxLevel = oldMaxLevel; //set back the the old max level
- }
+ level--;
+ maxLevel = oldMaxLevel; //set back the old max level
+ }
- ContainedIn containedAnn = member.getAnnotation(ContainedIn.class);
- if (containedAnn != null)
- {
- setAccessible(member);
- propertiesMetadata.containedInGetters.add(member);
- }
- }
+ ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
+ if ( containedAnn != null ) {
+ setAccessible( member );
+ propertiesMetadata.containedInGetters.add( member );
+ }
+ }
- private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context)
- {
- //FIXME name should be prefixed
- String fieldName = prefix + ann.name();
- propertiesMetadata.classNames.add(fieldName);
- propertiesMetadata.classStores.add(getStore(ann.store()));
- propertiesMetadata.classIndexes.add(getIndex(ann.index()));
- propertiesMetadata.classTermVectors.add(getTermVector(ann.termVector()));
- propertiesMetadata.classBridges.add(BridgeFactory.extractType(ann));
- propertiesMetadata.classBoosts.add(ann.boost().value());
+ private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context) {
+ //FIXME name should be prefixed
+ String fieldName = prefix + ann.name();
+ propertiesMetadata.classNames.add( fieldName );
+ propertiesMetadata.classStores.add( getStore( ann.store() ) );
+ propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
+ propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
+ propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
+ propertiesMetadata.classBoosts.add( ann.boost().value() );
- Analyzer analyzer = getAnalyzer(ann.analyzer(), context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analyzer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
+ Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analyzer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
- private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context)
- {
- setAccessible(member);
- propertiesMetadata.fieldGetters.add(member);
- String fieldName = prefix + BinderHelper.getAttributeName(member, fieldAnn.name());
- propertiesMetadata.fieldNames.add(fieldName);
- propertiesMetadata.fieldStore.add(getStore(fieldAnn.store()));
- propertiesMetadata.fieldIndex.add(getIndex(fieldAnn.index()));
- propertiesMetadata.fieldTermVectors.add(getTermVector(fieldAnn.termVector()));
- propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(fieldAnn, member, reflectionManager));
+ private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context) {
+ setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
+ propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member, reflectionManager ) );
- // Field > property > entity analyzer
- Analyzer analyzer = getAnalyzer(fieldAnn.analyzer(), context);
- if (analyzer == null) analyzer = getAnalyzer(member, context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
+ // Field > property > entity analyzer
+ Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
+ if ( analyzer == null ) analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
- private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member)
- {
- String localPrefix = prefix;
- if (".".equals(embeddedAnn.prefix()))
- {
- //default to property name
- localPrefix += member.getName() + '.';
- }
- else
- {
- localPrefix += embeddedAnn.prefix();
- }
- return localPrefix;
- }
+ private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member) {
+ String localPrefix = prefix;
+ if ( ".".equals( embeddedAnn.prefix() ) ) {
+ //default to property name
+ localPrefix += member.getName() + '.';
+ }
+ else {
+ localPrefix += embeddedAnn.prefix();
+ }
+ return localPrefix;
+ }
- private Field.Store getStore(Store store)
- {
- switch (store)
- {
- case NO:
- return Field.Store.NO;
- case YES:
- return Field.Store.YES;
- case COMPRESS:
- return Field.Store.COMPRESS;
- default:
- throw new AssertionFailure("Unexpected Store: " + store);
- }
- }
+ private Field.Store getStore(Store store) {
+ switch ( store ) {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure( "Unexpected Store: " + store );
+ }
+ }
- private Field.TermVector getTermVector(TermVector vector)
- {
- switch (vector)
- {
- case NO:
- return Field.TermVector.NO;
- case YES:
- return Field.TermVector.YES;
- case WITH_OFFSETS:
- return Field.TermVector.WITH_OFFSETS;
- case WITH_POSITIONS:
- return Field.TermVector.WITH_POSITIONS;
- case WITH_POSITION_OFFSETS:
- return Field.TermVector.WITH_POSITIONS_OFFSETS;
- default:
- throw new AssertionFailure("Unexpected TermVector: " + vector);
- }
- }
+ private Field.TermVector getTermVector(TermVector vector) {
+ switch ( vector ) {
+ case NO:
+ return Field.TermVector.NO;
+ case YES:
+ return Field.TermVector.YES;
+ case WITH_OFFSETS:
+ return Field.TermVector.WITH_OFFSETS;
+ case WITH_POSITIONS:
+ return Field.TermVector.WITH_POSITIONS;
+ case WITH_POSITION_OFFSETS:
+ return Field.TermVector.WITH_POSITIONS_OFFSETS;
+ default:
+ throw new AssertionFailure( "Unexpected TermVector: " + vector );
+ }
+ }
- private Field.Index getIndex(Index index)
- {
- switch (index)
- {
- case NO:
- return Field.Index.NO;
- case NO_NORMS:
- return Field.Index.NO_NORMS;
- case TOKENIZED:
- return Field.Index.TOKENIZED;
- case UN_TOKENIZED:
- return Field.Index.UN_TOKENIZED;
- default:
- throw new AssertionFailure("Unexpected Index: " + index);
- }
- }
+ private Field.Index getIndex(Index index) {
+ switch ( index ) {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.TOKENIZED;
+ case UN_TOKENIZED:
+ return Field.Index.UN_TOKENIZED;
+ default:
+ throw new AssertionFailure( "Unexpected Index: " + index );
+ }
+ }
- private Float getBoost(XAnnotatedElement element)
- {
- if (element == null) return null;
- Boost boost = element.getAnnotation(Boost.class);
- return boost != null ?
- boost.value() :
- null;
- }
+ private Float getBoost(XAnnotatedElement element) {
+ if ( element == null ) return null;
+ Boost boost = element.getAnnotation( Boost.class );
+ return boost != null ?
+ boost.value() :
+ null;
+ }
- private Object getMemberValue(Object bean, XMember getter)
- {
- Object value;
- try
- {
- value = getter.invoke(bean);
- }
- catch (Exception e)
- {
- throw new IllegalStateException("Could not get property value", e);
- }
- return value;
- }
+ private Object getMemberValue(Object bean, XMember getter) {
+ Object value;
+ try {
+ value = getter.invoke( bean );
+ }
+ catch (Exception e) {
+ throw new IllegalStateException( "Could not get property value", e );
+ }
+ return value;
+ }
- //TODO could we use T instead of EntityClass?
- public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor)
- {
- //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
- for (LuceneWork luceneWork : queue)
- {
- //any work on the same entity should be ignored
- if (luceneWork.getEntityClass() == entityClass
- )
- {
- Serializable currentId = luceneWork.getId();
- if (currentId != null && currentId.equals(id))
- { //find a way to use Type.equals(x,y)
- return;
- }
- //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
- }
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
+ //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
+ for (LuceneWork luceneWork : queue) {
+ //any work on the same entity should be ignored
+ if ( luceneWork.getEntityClass() == entityClass
+ ) {
+ Serializable currentId = luceneWork.getId();
+ if ( currentId != null && currentId.equals( id ) ) { //find a way to use Type.equals(x,y)
+ return;
+ }
+ //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
+ }
- }
- boolean searchForContainers = false;
- // if the ID is provided for this type, then just directly cast the id to a String.
- String idInString = idProvided ? (String) id : idBridge.objectToString(id);
- if (workType == WorkType.ADD)
- {
- Document doc = getDocument(entity, id);
- queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
- searchForContainers = true;
- }
- else if (workType == WorkType.DELETE || workType == WorkType.PURGE)
- {
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- }
- else if (workType == WorkType.PURGE_ALL)
- {
- queue.add(new PurgeAllLuceneWork(entityClass));
- }
- else if (workType == WorkType.UPDATE || workType == WorkType.COLLECTION)
- {
- Document doc = getDocument(entity, id);
- /**
- * even with Lucene 2.1, use of indexWriter to update is not an option
- * We can only delete by term, and the index doesn't have a term that
- * uniquely identify the entry.
- * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
- * double file opening.
- */
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
- searchForContainers = true;
- }
- else if (workType == WorkType.INDEX)
- {
- Document doc = getDocument(entity, id);
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- LuceneWork work = new AddLuceneWork(id, idInString, entityClass, doc);
- work.setBatch(true);
- queue.add(work);
- searchForContainers = true;
- }
+ }
+ boolean searchForContainers = false;
+ String idInString = idBridge.objectToString( id );
+ if ( workType == WorkType.ADD ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ searchForContainers = true;
+ }
+ else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ }
+ else if ( workType == WorkType.PURGE_ALL ) {
+ queue.add( new PurgeAllLuceneWork( entityClass ) );
+ }
+ else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
+ Document doc = getDocument( entity, id );
+ /**
+ * even with Lucene 2.1, use of indexWriter to update is not an option
+ * We can only delete by term, and the index doesn't have a term that
+ * uniquely identifies the entry.
+ * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
+ * double file opening.
+ */
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ searchForContainers = true;
+ }
+ else if ( workType == WorkType.INDEX ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ LuceneWork work = new AddLuceneWork( id, idInString, entityClass, doc );
+ work.setBatch( true );
+ queue.add( work );
+ searchForContainers = true;
+ }
- else
- {
- throw new AssertionFailure("Unknown WorkType: " + workType);
- }
+ else {
+ throw new AssertionFailure( "Unknown WorkType: " + workType );
+ }
- /**
- * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
- * have to be updated)
- * When the internal object is changed, we apply the {Add|Update}Work on containedIns
- */
- if (searchForContainers)
- {
- processContainedIn(entity, queue, rootPropertiesMetadata, searchFactoryImplementor);
- }
- }
+ /**
+ * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
+ * have to be updated)
+ * When the internal object is changed, we apply the {Add|Update}Work on containedIns
+ */
+ if ( searchForContainers ) {
+ processContainedIn( entity, queue, rootPropertiesMetadata, searchFactoryImplementor );
+ }
+ }
- private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor)
- {
- for (int i = 0; i < metadata.containedInGetters.size(); i++)
- {
- XMember member = metadata.containedInGetters.get(i);
- Object value = getMemberValue(instance, member);
- if (value == null) continue;
+ private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
+ for (int i = 0; i < metadata.containedInGetters.size(); i++) {
+ XMember member = metadata.containedInGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ if ( value == null ) continue;
- if (member.isArray())
- {
- for (Object arrayValue : (Object[]) value)
- {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass(arrayValue);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(arrayValue, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- else if (member.isCollection())
- {
- Collection collection;
- if (Map.class.equals(member.getCollectionClass()))
- {
- //hum
- collection = ((Map) value).values();
- }
- else
- {
- collection = (Collection) value;
- }
- for (Object collectionValue : collection)
- {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass(collectionValue);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(collectionValue, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- else
- {
- Class valueClass = Hibernate.getClass(value);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(value, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- //an embedded cannot have a useful @ContainedIn (no shared reference)
- //do not walk through them
- }
+ if ( member.isArray() ) {
+ for (Object arrayValue : (Object[]) value) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass( arrayValue );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( arrayValue, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ else if ( member.isCollection() ) {
+ Collection collection;
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum
+ collection = ( (Map) value ).values();
+ }
+ else {
+ collection = (Collection) value;
+ }
+ for (Object collectionValue : collection) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass( collectionValue );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( collectionValue, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ else {
+ Class valueClass = Hibernate.getClass( value );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( value, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ //an embedded cannot have a useful @ContainedIn (no shared reference)
+ //do not walk through them
+ }
- private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
- DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor)
- {
- Serializable id = (Serializable) builder.getMemberValue(value, builder.idGetter);
- builder.addWorkToQueue(valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor);
- }
+ private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
+ DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor) {
+ Serializable id = (Serializable) builder.getMemberValue( value, builder.idGetter );
+ builder.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
+ }
- public Document getDocument(T instance, Serializable id)
- {
- Document doc = new Document();
- XClass instanceClass = reflectionManager.toXClass(Hibernate.getClass(instance));
- if (rootPropertiesMetadata.boost != null)
- {
- doc.setBoost(rootPropertiesMetadata.boost);
- }
- {
- Field classField =
- new Field(CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO);
- doc.add(classField);
- if(!idProvided) idBridge.set(idKeywordName, id, doc, Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost);
- }
- buildDocumentFields(instance, doc, rootPropertiesMetadata);
- return doc;
- }
+ public Document getDocument(T instance, Serializable id) {
+ Document doc = new Document();
+ XClass instanceClass = reflectionManager.toXClass( Hibernate.getClass( instance ) );
+ if ( rootPropertiesMetadata.boost != null ) {
+ doc.setBoost( rootPropertiesMetadata.boost );
+ }
+ {
+ Field classField =
+ new Field( CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO );
+ doc.add( classField );
+ idBridge.set( idKeywordName, id, doc, Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost );
+ }
+ buildDocumentFields( instance, doc, rootPropertiesMetadata );
+ return doc;
+ }
- private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata)
- {
- if (instance == null) return;
- //needed for field access: I cannot work in the proxied version
- Object unproxiedInstance = unproxy(instance);
- for (int i = 0; i < propertiesMetadata.classBridges.size(); i++)
- {
- FieldBridge fb = propertiesMetadata.classBridges.get(i);
+ private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata) {
+ if ( instance == null ) return;
+ //needed for field access: I cannot work in the proxied version
+ Object unproxiedInstance = unproxy( instance );
+ for (int i = 0; i < propertiesMetadata.classBridges.size(); i++) {
+ FieldBridge fb = propertiesMetadata.classBridges.get( i );
- fb.set(propertiesMetadata.classNames.get(i),
- unproxiedInstance,
- doc,
- propertiesMetadata.classStores.get(i),
- propertiesMetadata.classIndexes.get(i),
- propertiesMetadata.classTermVectors.get(i),
- propertiesMetadata.classBoosts.get(i));
- }
- for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++)
- {
- XMember member = propertiesMetadata.fieldGetters.get(i);
- Object value = getMemberValue(unproxiedInstance, member);
- propertiesMetadata.fieldBridges.get(i).set(
- propertiesMetadata.fieldNames.get(i),
- value, doc,
- propertiesMetadata.fieldStore.get(i),
- propertiesMetadata.fieldIndex.get(i),
- propertiesMetadata.fieldTermVectors.get(i),
- getBoost(member)
- );
- }
- for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++)
- {
- XMember member = propertiesMetadata.embeddedGetters.get(i);
- Object value = getMemberValue(unproxiedInstance, member);
- //TODO handle boost at embedded level: already stored in propertiesMedatada.boost
+ fb.set( propertiesMetadata.classNames.get( i ),
+ unproxiedInstance,
+ doc,
+ propertiesMetadata.classStores.get( i ),
+ propertiesMetadata.classIndexes.get( i ),
+ propertiesMetadata.classTermVectors.get( i ),
+ propertiesMetadata.classBoosts.get( i ) );
+ }
+ for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++) {
+ XMember member = propertiesMetadata.fieldGetters.get( i );
+ Object value = getMemberValue( unproxiedInstance, member );
+ propertiesMetadata.fieldBridges.get( i ).set(
+ propertiesMetadata.fieldNames.get( i ),
+ value, doc,
+ propertiesMetadata.fieldStore.get( i ),
+ propertiesMetadata.fieldIndex.get( i ),
+ propertiesMetadata.fieldTermVectors.get( i ),
+ getBoost( member )
+ );
+ }
+ for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++) {
+ XMember member = propertiesMetadata.embeddedGetters.get( i );
+ Object value = getMemberValue( unproxiedInstance, member );
+ //TODO handle boost at embedded level: already stored in propertiesMetadata.boost
- if (value == null) continue;
- PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get(i);
- switch (propertiesMetadata.embeddedContainers.get(i))
- {
- case ARRAY:
- for (Object arrayValue : (Object[]) value)
- {
- buildDocumentFields(arrayValue, doc, embeddedMetadata);
- }
- break;
- case COLLECTION:
- for (Object collectionValue : (Collection) value)
- {
- buildDocumentFields(collectionValue, doc, embeddedMetadata);
- }
- break;
- case MAP:
- for (Object collectionValue : ((Map) value).values())
- {
- buildDocumentFields(collectionValue, doc, embeddedMetadata);
- }
- break;
- case OBJECT:
- buildDocumentFields(value, doc, embeddedMetadata);
- break;
- default:
- throw new AssertionFailure("Unknown embedded container: "
- + propertiesMetadata.embeddedContainers.get(i));
- }
- }
- }
+ if ( value == null ) continue;
+ PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get( i );
+ switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
+ case ARRAY:
+ for (Object arrayValue : (Object[]) value) {
+ buildDocumentFields( arrayValue, doc, embeddedMetadata );
+ }
+ break;
+ case COLLECTION:
+ for (Object collectionValue : (Collection) value) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case MAP:
+ for (Object collectionValue : ( (Map) value ).values()) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case OBJECT:
+ buildDocumentFields( value, doc, embeddedMetadata );
+ break;
+ default:
+ throw new AssertionFailure( "Unknown embedded container: "
+ + propertiesMetadata.embeddedContainers.get( i ) );
+ }
+ }
+ }
- private Object unproxy(Object value)
- {
- //FIXME this service should be part of Core?
- if (value instanceof HibernateProxy)
- {
- // .getImplementation() initializes the data by side effect
- value = ((HibernateProxy) value).getHibernateLazyInitializer()
- .getImplementation();
- }
- return value;
- }
+ private Object unproxy(Object value) {
+ //FIXME this service should be part of Core?
+ if ( value instanceof HibernateProxy ) {
+ // .getImplementation() initializes the data by side effect
+ value = ( (HibernateProxy) value ).getHibernateLazyInitializer()
+ .getImplementation();
+ }
+ return value;
+ }
- public Term getTerm(Serializable id)
+ public Term getTerm(Serializable id)
{
if (idProvided)
{
return new Term(idKeywordName, (String) id);
}
- return new Term(idKeywordName, idBridge.objectToString(id));
- }
+ return new Term( idKeywordName, idBridge.objectToString( id ) );
+ }
- public DirectoryProvider[] getDirectoryProviders()
- {
- return directoryProviders;
- }
+ public DirectoryProvider[] getDirectoryProviders() {
+ return directoryProviders;
+ }
- public IndexShardingStrategy getDirectoryProviderSelectionStrategy()
- {
- return shardingStrategy;
- }
+ public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
+ return shardingStrategy;
+ }
- public Analyzer getAnalyzer()
- {
- return analyzer;
- }
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
- private static void setAccessible(XMember member)
- {
- if (!Modifier.isPublic(member.getModifiers()))
- {
- member.setAccessible(true);
- }
- }
+ private static void setAccessible(XMember member) {
+ if ( !Modifier.isPublic( member.getModifiers() ) ) {
+ member.setAccessible( true );
+ }
+ }
- public TwoWayFieldBridge getIdBridge()
- {
- return idBridge;
- }
+ public TwoWayFieldBridge getIdBridge() {
+ return idBridge;
+ }
- public String getIdKeywordName()
- {
- return idKeywordName;
- }
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
- public static Class getDocumentClass(Document document)
- {
- String className = document.get(DocumentBuilder.CLASS_FIELDNAME);
- try
- {
- return ReflectHelper.classForName(className);
- }
- catch (ClassNotFoundException e)
- {
- throw new SearchException("Unable to load indexed class: " + className, e);
- }
- }
+ public static Class getDocumentClass(Document document) {
+ String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
+ try {
+ return ReflectHelper.classForName( className );
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to load indexed class: " + className, e );
+ }
+ }
- public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document)
- {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
- if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
- return (Serializable) builder.getIdBridge().get(builder.getIdKeywordName(), document);
- }
+ public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document) {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
+ if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
+ }
- public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields)
- {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
- if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
- final int fieldNbr = fields.length;
- Object[] result = new Object[fieldNbr];
+ public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields) {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
+ if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ final int fieldNbr = fields.length;
+ Object[] result = new Object[fieldNbr];
- if (builder.idKeywordName != null)
- {
- populateResult(builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document);
- }
+ if ( builder.idKeywordName != null ) {
+ populateResult( builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document );
+ }
- final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
- processFieldsForProjection(metadata, fields, result, document);
- return result;
- }
+ final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
+ processFieldsForProjection( metadata, fields, result, document );
+ return result;
+ }
- private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document)
- {
- final int nbrFoEntityFields = metadata.fieldNames.size();
- for (int index = 0; index < nbrFoEntityFields; index++)
- {
- populateResult(metadata.fieldNames.get(index),
- metadata.fieldBridges.get(index),
- metadata.fieldStore.get(index),
- fields,
- result,
- document
- );
- }
- final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
- for (int index = 0; index < nbrOfEmbeddedObjects; index++)
- {
- //there is nothing we can do for collections
- if (metadata.embeddedContainers.get(index) == PropertiesMetadata.Container.OBJECT)
- {
- processFieldsForProjection(metadata.embeddedPropertiesMetadata.get(index), fields, result, document);
- }
- }
- }
+ private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document) {
+ final int nbrFoEntityFields = metadata.fieldNames.size();
+ for (int index = 0; index < nbrFoEntityFields; index++) {
+ populateResult( metadata.fieldNames.get( index ),
+ metadata.fieldBridges.get( index ),
+ metadata.fieldStore.get( index ),
+ fields,
+ result,
+ document
+ );
+ }
+ final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
+ for (int index = 0; index < nbrOfEmbeddedObjects; index++) {
+ //there is nothing we can do for collections
+ if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT ) {
+ processFieldsForProjection( metadata.embeddedPropertiesMetadata.get( index ), fields, result, document );
+ }
+ }
+ }
- private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
- String[] fields, Object[] result, Document document)
- {
- int matchingPosition = getFieldPosition(fields, fieldName);
- if (matchingPosition != -1)
- {
- //TODO make use of an isTwoWay() method
- if (store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom(fieldBridge.getClass()))
- {
- result[matchingPosition] = ((TwoWayFieldBridge) fieldBridge).get(fieldName, document);
- if (log.isTraceEnabled())
- {
- log.trace("Field {} projected as {}", fieldName, result[matchingPosition]);
- }
- }
- else
- {
- if (store == Field.Store.NO)
- {
- throw new SearchException("Projecting an unstored field: " + fieldName);
- }
- else
- {
- throw new SearchException("FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass());
- }
- }
- }
- }
+ private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
+ String[] fields, Object[] result, Document document) {
+ int matchingPosition = getFieldPosition( fields, fieldName );
+ if ( matchingPosition != -1 ) {
+ //TODO make use of an isTwoWay() method
+ if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom( fieldBridge.getClass() ) ) {
+ result[matchingPosition] = ( (TwoWayFieldBridge) fieldBridge ).get( fieldName, document );
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Field {} projected as {}", fieldName, result[matchingPosition] );
+ }
+ }
+ else {
+ if ( store == Field.Store.NO ) {
+ throw new SearchException( "Projecting an unstored field: " + fieldName );
+ }
+ else {
+ throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass() );
+ }
+ }
+ }
+ }
- private static int getFieldPosition(String[] fields, String fieldName)
- {
- int fieldNbr = fields.length;
- for (int index = 0; index < fieldNbr; index++)
- {
- if (fieldName.equals(fields[index])) return index;
- }
- return -1;
- }
+ private static int getFieldPosition(String[] fields, String fieldName) {
+ int fieldNbr = fields.length;
+ for (int index = 0; index < fieldNbr; index++) {
+ if ( fieldName.equals( fields[index] ) ) return index;
+ }
+ return -1;
+ }
- public void postInitialize(Set<Class> indexedClasses)
- {
- //this method does not requires synchronization
- Class plainClass = reflectionManager.toClass(beanClass);
- Set<Class> tempMappedSubclasses = new HashSet<Class>();
- //together with the caller this creates a o(2), but I think it's still faster than create the up hierarchy for each class
- for (Class currentClass : indexedClasses)
- {
- if (plainClass.isAssignableFrom(currentClass)) tempMappedSubclasses.add(currentClass);
- }
- this.mappedSubclasses = Collections.unmodifiableSet(tempMappedSubclasses);
- Class superClass = plainClass.getSuperclass();
- this.isRoot = true;
- while (superClass != null)
- {
- if (indexedClasses.contains(superClass))
- {
- this.isRoot = false;
- break;
- }
- superClass = superClass.getSuperclass();
- }
- }
+ public void postInitialize(Set<Class> indexedClasses) {
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass( beanClass );
+ Set<Class> tempMappedSubclasses = new HashSet<Class>();
+ //together with the caller this creates an O(n^2) pass, but I think it's still faster than creating the up hierarchy for each class
+ for (Class currentClass : indexedClasses) {
+ if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add( currentClass );
+ }
+ this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
+ Class superClass = plainClass.getSuperclass();
+ this.isRoot = true;
+ while ( superClass != null) {
+ if ( indexedClasses.contains( superClass ) ) {
+ this.isRoot = false;
+ break;
+ }
+ superClass = superClass.getSuperclass();
+ }
+ }
- public Set<Class> getMappedSubclasses()
- {
- return mappedSubclasses;
- }
+ public Set<Class> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
- /**
- * Make sure to return false if there is a risk of composite id
- * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- */
- public boolean isSafeFromTupleId()
- {
- return safeFromTupleId;
- }
+ /**
+ * Make sure to return false if there is a risk of composite id
+ * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most databases
+ */
+ public boolean isSafeFromTupleId() {
+ return safeFromTupleId;
+ }
- private static class PropertiesMetadata
- {
- public Float boost;
- public Analyzer analyzer;
- public final List<String> fieldNames = new ArrayList<String>();
- public final List<XMember> fieldGetters = new ArrayList<XMember>();
- public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
- public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
- public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
- public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
- public final List<XMember> embeddedGetters = new ArrayList<XMember>();
- public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
- public final List<Container> embeddedContainers = new ArrayList<Container>();
- public final List<XMember> containedInGetters = new ArrayList<XMember>();
- public final List<String> classNames = new ArrayList<String>();
- public final List<Field.Store> classStores = new ArrayList<Field.Store>();
- public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
- public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
- public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
- public final List<Float> classBoosts = new ArrayList<Float>();
+ private static class PropertiesMetadata {
+ public Float boost;
+ public Analyzer analyzer;
+ public final List<String> fieldNames = new ArrayList<String>();
+ public final List<XMember> fieldGetters = new ArrayList<XMember>();
+ public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
+ public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+ public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
+ public final List<XMember> embeddedGetters = new ArrayList<XMember>();
+ public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
+ public final List<Container> embeddedContainers = new ArrayList<Container>();
+ public final List<XMember> containedInGetters = new ArrayList<XMember>();
+ public final List<String> classNames = new ArrayList<String>();
+ public final List<Field.Store> classStores = new ArrayList<Field.Store>();
+ public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
+ public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
+ public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
+ public final List<Float> classBoosts = new ArrayList<Float>();
- public enum Container
- {
- OBJECT,
- COLLECTION,
- MAP,
- ARRAY
- }
- }
+ public enum Container {
+ OBJECT,
+ COLLECTION,
+ MAP,
+ ARRAY
+ }
+ }
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -13,69 +13,56 @@
* @author Emmanuel Bernard
* @author John Griffin
*/
-public class DocumentExtractor
-{
- private final SearchFactoryImplementor searchFactoryImplementor;
- private final String[] projection;
+public class DocumentExtractor {
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final String[] projection;
- public DocumentExtractor(SearchFactoryImplementor searchFactoryImplementor, String... projection)
- {
- this.searchFactoryImplementor = searchFactoryImplementor;
- this.projection = projection;
- }
+ public DocumentExtractor(SearchFactoryImplementor searchFactoryImplementor, String... projection) {
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.projection = projection;
+ }
- private EntityInfo extract(Document document)
- {
- Class clazz = DocumentBuilder.getDocumentClass(document);
- Serializable id = DocumentBuilder.getDocumentId(searchFactoryImplementor, clazz, document);
- Object[] projected = null;
- if (projection != null && projection.length > 0)
- {
- projected = DocumentBuilder.getDocumentFields(searchFactoryImplementor, clazz, document, projection);
- }
- EntityInfo entityInfo = new EntityInfo(clazz, id, projected);
- return entityInfo;
- }
+ private EntityInfo extract(Document document) {
+ Class clazz = DocumentBuilder.getDocumentClass( document );
+ Serializable id = DocumentBuilder.getDocumentId( searchFactoryImplementor, clazz, document );
+ Object[] projected = null;
+ if ( projection != null && projection.length > 0 ) {
+ projected = DocumentBuilder.getDocumentFields( searchFactoryImplementor, clazz, document, projection );
+ }
+ EntityInfo entityInfo = new EntityInfo( clazz, id, projected );
+ return entityInfo;
+ }
- public EntityInfo extract(Hits hits, int index) throws IOException
- {
- Document doc = hits.doc(index);
- //TODO if we are only looking for score (unlikely), avoid accessing doc (lazy load)
- EntityInfo entityInfo = extract(doc);
- Object[] entityInfoProjection = entityInfo.projection; //Navin Surtani changed eip variable to entityInfoProjection.
+ public EntityInfo extract(Hits hits, int index) throws IOException {
+ Document doc = hits.doc( index );
+ //TODO if we are only looking for score (unlikely), avoid accessing doc (lazy load)
+ EntityInfo entityInfo = extract( doc );
+ Object[] eip = entityInfo.projection;
- if (entityInfoProjection != null && entityInfoProjection.length > 0)
- {
- for (int x = 0; x < projection.length; x++)
- {
- if (ProjectionConstants.SCORE.equals(projection[x]))
- {
- entityInfoProjection[x] = hits.score(index);
- }
- else if (ProjectionConstants.ID.equals(projection[x]))
- {
- entityInfoProjection[x] = entityInfo.id;
- }
- else if (ProjectionConstants.DOCUMENT.equals(projection[x]))
- {
- entityInfoProjection[x] = doc;
- }
- else if (ProjectionConstants.DOCUMENT_ID.equals(projection[x]))
- {
- entityInfoProjection[x] = hits.id(index);
- }
- else if (ProjectionConstants.BOOST.equals(projection[x]))
- {
- entityInfoProjection[x] = doc.getBoost();
- }
- else if (ProjectionConstants.THIS.equals(projection[x]))
- {
- //THIS could be projected more than once
- //THIS loading delayed to the Loader phase
- entityInfo.indexesOfThis.add(x);
- }
- }
- }
- return entityInfo;
- }
+ if ( eip != null && eip.length > 0 ) {
+ for (int x = 0; x < projection.length; x++) {
+ if ( ProjectionConstants.SCORE.equals( projection[x] ) ) {
+ eip[x] = hits.score( index );
+ }
+ else if ( ProjectionConstants.ID.equals( projection[x] ) ) {
+ eip[x] = entityInfo.id;
+ }
+ else if ( ProjectionConstants.DOCUMENT.equals( projection[x] ) ) {
+ eip[x] = doc;
+ }
+ else if ( ProjectionConstants.DOCUMENT_ID.equals( projection[x] ) ) {
+ eip[x] = hits.id( index );
+ }
+ else if ( ProjectionConstants.BOOST.equals( projection[x] ) ) {
+ eip[x] = doc.getBoost();
+ }
+ else if ( ProjectionConstants.THIS.equals( projection[x] ) ) {
+ //THIS could be projected more than once
+ //THIS loading delayed to the Loader phase
+ entityInfo.indexesOfThis.add(x);
+ }
+ }
+ }
+ return entityInfo;
+ }
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/FilterDef.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/FilterDef.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/FilterDef.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -7,18 +7,32 @@
import java.util.Map;
import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.CachingWrapperFilter;
/**
+ * A wrapper class which encapsulates all required information to create a defined filter.
+ *
* @author Emmanuel Bernard
*/
//TODO serialization
+@SuppressWarnings("unchecked")
public class FilterDef {
private Class impl;
private Method factoryMethod;
private Method keyMethod;
private Map<String, Method> setters = new HashMap<String, Method>();
private boolean cache;
+ private CachingWrapperFilter useCachingWrapperFilter;
+ public CachingWrapperFilter getUseCachingWrapperFilter() {
+ return useCachingWrapperFilter;
+ }
+
+ public void setUseCachingWrapperFilter(
+ CachingWrapperFilter useCachingWrapperFilter) {
+ this.useCachingWrapperFilter = useCachingWrapperFilter;
+ }
+
public Class getImpl() {
return impl;
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/QueryLoader.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/QueryLoader.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/engine/QueryLoader.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -16,12 +16,12 @@
* @author Emmanuel Bernard
*/
public class QueryLoader implements Loader {
- private final Logger log = LoggerFactory.getLogger( QueryLoader.class );
private Session session;
private Class entityType;
private SearchFactoryImplementor searchFactoryImplementor;
private Criteria criteria;
+ private boolean isExplicitCriteria;
public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
this.session = session;
@@ -33,6 +33,8 @@
}
public Object load(EntityInfo entityInfo) {
+ //if explicit criteria, make sure to use it to load the objects
+ if ( isExplicitCriteria ) load( new EntityInfo[] { entityInfo } );
return ObjectLoaderHelper.load( entityInfo, session );
}
@@ -46,6 +48,7 @@
}
public void setCriteria(Criteria criteria) {
+ isExplicitCriteria = criteria != null;
this.criteria = criteria;
}
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -1,71 +1,44 @@
-//$
+// $Id:$
package org.hibernate.search.event;
-import java.io.Serializable;
-
-import org.hibernate.event.PostCollectionRecreateEventListener;
-import org.hibernate.event.PostCollectionRemoveEventListener;
-import org.hibernate.event.PostCollectionUpdateEventListener;
import org.hibernate.event.PostCollectionRecreateEvent;
+import org.hibernate.event.PostCollectionRecreateEventListener;
import org.hibernate.event.PostCollectionRemoveEvent;
+import org.hibernate.event.PostCollectionRemoveEventListener;
import org.hibernate.event.PostCollectionUpdateEvent;
-import org.hibernate.event.AbstractCollectionEvent;
-import org.hibernate.search.backend.WorkType;
-import org.hibernate.engine.EntityEntry;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.hibernate.event.PostCollectionUpdateEventListener;
/**
- * Support collection event listening (starts from hibernate core 3.2.6)
- * FIXME deprecate as soon as we target Core 3.3 and merge back into the superclass
- *
* @author Emmanuel Bernard
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener}
*/
+@SuppressWarnings("serial")
+@Deprecated
public class FullTextIndexCollectionEventListener extends FullTextIndexEventListener
implements PostCollectionRecreateEventListener,
PostCollectionRemoveEventListener,
PostCollectionUpdateEventListener {
- private final Logger log = LoggerFactory.getLogger( FullTextIndexCollectionEventListener.class );
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostRecreateCollection(PostCollectionRecreateEvent)}
+ */
+ @Deprecated
public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
processCollectionEvent( event );
}
- private void processCollectionEvent(AbstractCollectionEvent event) {
- Object entity = event.getAffectedOwnerOrNull();
- if ( entity == null ) {
- //Hibernate cannot determine every single time the owner especially incase detached objects are involved
- // or property-ref is used
- //Should log really but we don't know if we're interested in this collection for indexing
- return;
- }
- if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( entity.getClass() ) ) {
- Serializable id = getId( entity, event );
- if (id == null) {
- log.warn(
- "Unable to reindex entity on collection change, id cannot be extracted: {}",
- event.getAffectedOwnerEntityName()
- );
- return;
- }
- processWork( entity, id, WorkType.COLLECTION, event );
- }
- }
-
- private Serializable getId(Object entity, AbstractCollectionEvent event) {
- Serializable id = event.getAffectedOwnerIdOrNull();
- if ( id == null ) {
- //most likely this recovery is unnecessary since Hibernate Core probably try that
- EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( entity );
- id = entityEntry == null ? null : entityEntry.getId();
- }
- return id;
- }
-
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostRemoveCollection(PostCollectionRemoveEvent)}
+ */
+ @Deprecated
public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
processCollectionEvent( event );
}
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostUpdateCollection(PostCollectionUpdateEvent)}
+ */
+ @Deprecated
public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
processCollectionEvent( event );
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -2,22 +2,17 @@
package org.hibernate.search.event;
import org.hibernate.cfg.Configuration;
-import org.hibernate.event.AbstractEvent;
-import org.hibernate.event.Destructible;
-import org.hibernate.event.Initializable;
-import org.hibernate.event.PostDeleteEvent;
-import org.hibernate.event.PostDeleteEventListener;
-import org.hibernate.event.PostInsertEvent;
-import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.event.PostUpdateEvent;
-import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.event.*;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.cfg.Cfg;
import org.hibernate.search.transaction.EventSourceTransactionContext;
-import org.hibernate.search.cfg.Cfg;
+import org.hibernate.engine.EntityEntry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.Serializable;
@@ -31,10 +26,14 @@
*/
//TODO work on sharing the same indexWriters and readers across a single post operation...
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
-public class FullTextIndexEventListener implements PostDeleteEventListener, PostInsertEventListener,
- PostUpdateEventListener, Initializable, Destructible
-{
- @SuppressWarnings({"WeakerAccess"})
+@SuppressWarnings("serial")
+public class FullTextIndexEventListener implements PostDeleteEventListener,
+ PostInsertEventListener, PostUpdateEventListener,
+ PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener, Initializable, Destructible {
+
+ private static final Logger log = LoggerFactory.getLogger(FullTextIndexEventListener.class);
+
protected boolean used;
protected SearchFactoryImplementor searchFactoryImplementor;
@@ -112,4 +111,47 @@
{
searchFactoryImplementor.close();
}
+
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
+ processCollectionEvent( event );
+ }
+
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
+ processCollectionEvent( event );
+ }
+
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
+ processCollectionEvent( event );
+ }
+
+ protected void processCollectionEvent(AbstractCollectionEvent event) {
+ Object entity = event.getAffectedOwnerOrNull();
+ if ( entity == null ) {
+ //Hibernate cannot determine the owner every single time, especially in case detached objects are involved
+ // or property-ref is used
+ //Should log really but we don't know if we're interested in this collection for indexing
+ return;
+ }
+ if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( entity.getClass() ) ) {
+ Serializable id = getId( entity, event );
+ if (id == null) {
+ log.warn(
+ "Unable to reindex entity on collection change, id cannot be extracted: {}",
+ event.getAffectedOwnerEntityName()
+ );
+ return;
+ }
+ processWork( entity, id, WorkType.COLLECTION, event );
+ }
+ }
+
+ private Serializable getId(Object entity, AbstractCollectionEvent event) {
+ Serializable id = event.getAffectedOwnerIdOrNull();
+ if ( id == null ) {
+ //most likely this recovery is unnecessary since Hibernate Core probably tries that already
+ EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( entity );
+ id = entityEntry == null ? null : entityEntry.getId();
+ }
+ return id;
+ }
}
Added: search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/CachingWrapperFilter.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/CachingWrapperFilter.java (rev 0)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/CachingWrapperFilter.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -0,0 +1,73 @@
+package org.hibernate.search.filter;
+
+import java.io.IOException;
+import java.util.BitSet;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.Filter;
+import org.hibernate.util.SoftLimitMRUCache;
+
+/**
+ * A slightly different version of Lucene's original <code>CachingWrapperFilter</code> which
+ * uses <code>SoftReferences</code> instead of <code>WeakReferences</code> in order to cache
+ * the filter <code>BitSet</code>.
+ *
+ * @author Hardy Ferentschik
+ * @see org.apache.lucene.search.CachingWrapperFilter
+ * @see <a href="http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-174">HSEARCH-174</a>
+ */
+@SuppressWarnings("serial")
+public class CachingWrapperFilter extends Filter {
+
+ private static final int DEFAULT_SIZE = 5;
+
+ /**
+ * The cache using soft references in order to store the filter bit sets.
+ */
+ protected transient SoftLimitMRUCache cache;
+
+ protected Filter filter;
+
+ /**
+ * @param filter
+ * Filter to cache results of
+ */
+ public CachingWrapperFilter(Filter filter) {
+ this.filter = filter;
+ }
+
+ public BitSet bits(IndexReader reader) throws IOException {
+ if (cache == null) {
+ cache = new SoftLimitMRUCache(DEFAULT_SIZE);
+ }
+
+ synchronized (cache) { // check cache
+ BitSet cached = (BitSet) cache.get(reader);
+ if (cached != null) {
+ return cached;
+ }
+ }
+
+ final BitSet bits = filter.bits(reader);
+
+ synchronized (cache) { // update cache
+ cache.put(reader, bits);
+ }
+
+ return bits;
+ }
+
+ public String toString() {
+ return this.getClass().getName() + "(" + filter + ")";
+ }
+
+ public boolean equals(Object o) {
+ if (!(o instanceof CachingWrapperFilter))
+ return false;
+ return this.filter.equals(((CachingWrapperFilter) o).filter);
+ }
+
+ public int hashCode() {
+ return filter.hashCode() ^ 0x1117BF25;
+ }
+}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterCachingStrategy.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterCachingStrategy.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterCachingStrategy.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -7,6 +7,7 @@
/**
* Defines the caching filter strategy
+ * Implementations of getCachedFilter and addCachedFilter must be thread-safe
*
* @author Emmanuel Bernard
*/
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterKey.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterKey.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/filter/FilterKey.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -4,13 +4,16 @@
/**
* The key object must implement equals / hashcode so that 2 keys are equals if and only if
* the given Filter types are the same and the set of parameters are the same.
- *
+ * <p/>
* The FilterKey creator (ie the @Key method) does not have to inject <code>impl</code>
* It will be done by Hibernate Search
*
* @author Emmanuel Bernard
*/
public abstract class FilterKey {
+ // FilterKey implementations do not have to be thread-safe as FilterCachingStrategy ensures
+ // a memory barrier between usages
+ //
private Class impl;
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/InitContext.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/InitContext.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/InitContext.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -14,7 +14,6 @@
import org.hibernate.search.SearchException;
import org.hibernate.search.Environment;
import org.hibernate.search.cfg.Cfg;
-import org.hibernate.search.cfg.CfgImpl;
import org.hibernate.search.util.DelegateNamedAnalyzer;
import org.hibernate.cfg.Configuration;
import org.hibernate.util.ReflectHelper;
@@ -28,231 +27,176 @@
/**
* @author Emmanuel Bernard
*/
-public class InitContext
-{
- private final Map<String, AnalyzerDef> analyzerDefs = new HashMap<String, AnalyzerDef>();
- private final List<DelegateNamedAnalyzer> lazyAnalyzers = new ArrayList<DelegateNamedAnalyzer>();
- private final Analyzer defaultAnalyzer;
- private final Similarity defaultSimilarity;
+public class InitContext {
+ private final Map<String, AnalyzerDef> analyzerDefs = new HashMap<String, AnalyzerDef>();
+ private final List<DelegateNamedAnalyzer> lazyAnalyzers = new ArrayList<DelegateNamedAnalyzer>();
+ private final Analyzer defaultAnalyzer;
+ private final Similarity defaultSimilarity;
- public InitContext(Configuration hibernateConfig)
- {
- this(new CfgImpl(hibernateConfig));
- }
+ public InitContext(Cfg cfg) {
+ defaultAnalyzer = initAnalyzer(cfg);
+ defaultSimilarity = initSimilarity(cfg);
+ }
- public InitContext(Cfg cfg)
- {
- defaultAnalyzer = initAnalyzer(cfg);
- defaultSimilarity = initSimilarity(cfg);
+ public void addAnalyzerDef(AnalyzerDef ann) {
+ //FIXME somehow remember where the analyzerDef comes from and raise an exception if an analyzerDef
+ //with the same name from two different places are added
+ //multiple adding from the same place is required to deal with inheritance hierarchy processed multiple times
+ if ( ann != null && analyzerDefs.put( ann.name(), ann ) != null ) {
+ //throw new SearchException("Multiple AnalyzerDef with the same name: " + name);
+ }
+ }
+ public Analyzer buildLazyAnalyzer(String name) {
+ final DelegateNamedAnalyzer delegateNamedAnalyzer = new DelegateNamedAnalyzer( name );
+ lazyAnalyzers.add(delegateNamedAnalyzer);
+ return delegateNamedAnalyzer;
+ }
- }
+ public List<DelegateNamedAnalyzer> getLazyAnalyzers() {
+ return lazyAnalyzers;
+ }
- public void addAnalyzerDef(AnalyzerDef ann)
- {
- //FIXME somehow remember where the analyzerDef comes from and raise an exception if an analyzerDef
- //with the same name from two different places are added
- //multiple adding from the same place is required to deal with inheritance hierarchy processed multiple times
- if (ann != null && analyzerDefs.put(ann.name(), ann) != null)
- {
- //throw new SearchException("Multiple AnalyzerDef with the same name: " + name);
- }
- }
-
- public Analyzer buildLazyAnalyzer(String name)
- {
- final DelegateNamedAnalyzer delegateNamedAnalyzer = new DelegateNamedAnalyzer(name);
- lazyAnalyzers.add(delegateNamedAnalyzer);
- return delegateNamedAnalyzer;
- }
-
- public List<DelegateNamedAnalyzer> getLazyAnalyzers()
- {
- return lazyAnalyzers;
- }
-
- /**
- * Initializes the Lucene analyzer to use by reading the analyzer class from the configuration and instantiating it.
- *
- * @param cfg The current configuration.
- * @return The Lucene analyzer to use for tokenisation.
- */
- private Analyzer initAnalyzer(Cfg cfg)
- {
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty(Environment.ANALYZER_CLASS);
- if (analyzerClassName != null)
- {
- try
- {
- analyzerClass = ReflectHelper.classForName(analyzerClassName);
- }
- catch (Exception e)
- {
- return buildLazyAnalyzer(analyzerClassName);
+ /**
+ * Initializes the Lucene analyzer to use by reading the analyzer class from the configuration and instantiating it.
+ *
+ * @param cfg
+ * The current configuration.
+ * @return The Lucene analyzer to use for tokenisation.
+ */
+ private Analyzer initAnalyzer(Cfg cfg) {
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS);
+ if (analyzerClassName != null) {
+ try {
+ analyzerClass = ReflectHelper.classForName(analyzerClassName);
+ } catch (Exception e) {
+ return buildLazyAnalyzer( analyzerClassName );
// throw new SearchException("Lucene analyzer class '" + analyzerClassName + "' defined in property '"
// + Environment.ANALYZER_CLASS + "' could not be found.", e);
- }
- }
- else
- {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer defaultAnalyzer;
- try
- {
- defaultAnalyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e)
- {
- throw new SearchException("Lucene analyzer does not implement " + Analyzer.class.getName() + ": "
- + analyzerClassName, e);
- }
- catch (Exception e)
- {
- throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClassName, e);
- }
- return defaultAnalyzer;
- }
+ }
+ } else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer defaultAnalyzer;
+ try {
+ defaultAnalyzer = (Analyzer) analyzerClass.newInstance();
+ } catch (ClassCastException e) {
+ throw new SearchException("Lucene analyzer does not implement " + Analyzer.class.getName() + ": "
+ + analyzerClassName, e);
+ } catch (Exception e) {
+ throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClassName, e);
+ }
+ return defaultAnalyzer;
+ }
- /**
- * Initializes the Lucene similarity to use
- */
- private Similarity initSimilarity(Cfg cfg)
- {
- Class similarityClass;
- String similarityClassName = cfg.getProperty(Environment.SIMILARITY_CLASS);
- if (similarityClassName != null)
- {
- try
- {
- similarityClass = ReflectHelper.classForName(similarityClassName);
- }
- catch (Exception e)
- {
- throw new SearchException("Lucene Similarity class '" + similarityClassName + "' defined in property '"
- + Environment.SIMILARITY_CLASS + "' could not be found.", e);
- }
- }
- else
- {
- similarityClass = null;
- }
+ /**
+ * Initializes the Lucene similarity to use
+ */
+ private Similarity initSimilarity(Cfg cfg) {
+ Class similarityClass;
+ String similarityClassName = cfg.getProperty(Environment.SIMILARITY_CLASS);
+ if (similarityClassName != null) {
+ try {
+ similarityClass = ReflectHelper.classForName(similarityClassName);
+ } catch (Exception e) {
+ throw new SearchException("Lucene Similarity class '" + similarityClassName + "' defined in property '"
+ + Environment.SIMILARITY_CLASS + "' could not be found.", e);
+ }
+ }
+ else {
+ similarityClass = null;
+ }
- // Initialize similarity
- if (similarityClass == null)
- {
- return Similarity.getDefault();
- }
- else
- {
- Similarity defaultSimilarity;
- try
- {
- defaultSimilarity = (Similarity) similarityClass.newInstance();
- }
- catch (ClassCastException e)
- {
- throw new SearchException("Lucene similarity does not extend " + Similarity.class.getName() + ": "
- + similarityClassName, e);
- }
- catch (Exception e)
- {
- throw new SearchException("Failed to instantiate lucene similarity with type " + similarityClassName, e);
- }
- return defaultSimilarity;
- }
- }
+ // Initialize similarity
+ if ( similarityClass == null ) {
+ return Similarity.getDefault();
+ }
+ else {
+ Similarity defaultSimilarity;
+ try {
+ defaultSimilarity = (Similarity) similarityClass.newInstance();
+ } catch (ClassCastException e) {
+ throw new SearchException("Lucene similarity does not extend " + Similarity.class.getName() + ": "
+ + similarityClassName, e);
+ } catch (Exception e) {
+ throw new SearchException("Failed to instantiate lucene similarity with type " + similarityClassName, e);
+ }
+ return defaultSimilarity;
+ }
+ }
- public Analyzer getDefaultAnalyzer()
- {
- return defaultAnalyzer;
- }
+ public Analyzer getDefaultAnalyzer() {
+ return defaultAnalyzer;
+ }
- public Similarity getDefaultSimilarity()
- {
- return defaultSimilarity;
- }
+ public Similarity getDefaultSimilarity() {
+ return defaultSimilarity;
+ }
- public Map<String, Analyzer> initLazyAnalyzers()
- {
- Map<String, Analyzer> initializedAnalyzers = new HashMap<String, Analyzer>(analyzerDefs.size());
+ public Map<String, Analyzer> initLazyAnalyzers() {
+ Map<String, Analyzer> initializedAnalyzers = new HashMap<String, Analyzer>( analyzerDefs.size() );
- for (DelegateNamedAnalyzer namedAnalyzer : lazyAnalyzers)
- {
- String name = namedAnalyzer.getName();
- if (initializedAnalyzers.containsKey(name))
- {
- namedAnalyzer.setDelegate(initializedAnalyzers.get(name));
- }
- else
- {
- if (analyzerDefs.containsKey(name))
- {
- final Analyzer analyzer = buildAnalyzer(analyzerDefs.get(name));
- namedAnalyzer.setDelegate(analyzer);
- initializedAnalyzers.put(name, analyzer);
- }
- else
- {
- throw new SearchException("Analyzer found with an unknown definition: " + name);
- }
- }
- }
+ for (DelegateNamedAnalyzer namedAnalyzer : lazyAnalyzers) {
+ String name = namedAnalyzer.getName();
+ if ( initializedAnalyzers.containsKey( name ) ) {
+ namedAnalyzer.setDelegate( initializedAnalyzers.get( name ) );
+ }
+ else {
+ if ( analyzerDefs.containsKey( name ) ) {
+ final Analyzer analyzer = buildAnalyzer( analyzerDefs.get( name ) );
+ namedAnalyzer.setDelegate( analyzer );
+ initializedAnalyzers.put( name, analyzer );
+ }
+ else {
+ throw new SearchException("Analyzer found with an unknown definition: " + name);
+ }
+ }
+ }
- //initialize the remaining definitions
- for (Map.Entry<String, AnalyzerDef> entry : analyzerDefs.entrySet())
- {
- if (!initializedAnalyzers.containsKey(entry.getKey()))
- {
- final Analyzer analyzer = buildAnalyzer(entry.getValue());
- initializedAnalyzers.put(entry.getKey(), analyzer);
- }
- }
- return Collections.unmodifiableMap(initializedAnalyzers);
- }
+ //initialize the remaining definitions
+ for ( Map.Entry<String, AnalyzerDef> entry : analyzerDefs.entrySet() ) {
+ if ( ! initializedAnalyzers.containsKey( entry.getKey() ) ) {
+ final Analyzer analyzer = buildAnalyzer( entry.getValue() );
+ initializedAnalyzers.put( entry.getKey(), analyzer );
+ }
+ }
+ return Collections.unmodifiableMap( initializedAnalyzers );
+ }
- private Analyzer buildAnalyzer(AnalyzerDef analyzerDef)
- {
- TokenizerDef token = analyzerDef.tokenizer();
- TokenizerFactory tokenFactory = (TokenizerFactory) instantiate(token.factory());
- tokenFactory.init(getMapOfParameters(token.params()));
+ private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
+ TokenizerDef token = analyzerDef.tokenizer();
+ TokenizerFactory tokenFactory = (TokenizerFactory) instantiate( token.factory() );
+ tokenFactory.init( getMapOfParameters( token.params() ) );
- final int length = analyzerDef.filters().length;
- TokenFilterFactory[] filters = new TokenFilterFactory[length];
- for (int index = 0; index < length; index++)
- {
- TokenFilterDef filterDef = analyzerDef.filters()[index];
- filters[index] = (TokenFilterFactory) instantiate(filterDef.factory());
- filters[index].init(getMapOfParameters(filterDef.params()));
- }
- return new TokenizerChain(tokenFactory, filters);
- }
+ final int length = analyzerDef.filters().length;
+ TokenFilterFactory[] filters = new TokenFilterFactory[length];
+ for ( int index = 0 ; index < length ; index++ ) {
+ TokenFilterDef filterDef = analyzerDef.filters()[index];
+ filters[index] = (TokenFilterFactory) instantiate( filterDef.factory() );
+ filters[index].init( getMapOfParameters( filterDef.params() ) );
+ }
+ return new TokenizerChain(tokenFactory, filters);
+ }
- private Object instantiate(Class clazz)
- {
- try
- {
- return clazz.newInstance();
- }
- catch (IllegalAccessException e)
- {
- throw new SearchException("Unable to instantiate class: " + clazz, e);
- }
- catch (InstantiationException e)
- {
- throw new SearchException("Unable to instantiate class: " + clazz, e);
- }
- }
+ private Object instantiate(Class clazz) {
+ try {
+ return clazz.newInstance();
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instantiate class: " + clazz, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instantiate class: " + clazz, e );
+ }
+ }
- private Map<String, String> getMapOfParameters(Parameter[] params)
- {
- Map<String, String> mapOfParams = new HashMap<String, String>(params.length);
- for (Parameter param : params)
- {
- mapOfParams.put(param.name(), param.value());
- }
- return Collections.unmodifiableMap(mapOfParams );
+ private Map<String, String> getMapOfParameters(Parameter[] params) {
+ Map<String, String> mapOfParams = new HashMap<String, String>( params.length );
+ for (Parameter param : params) {
+ mapOfParams.put( param.name(), param.value() );
+ }
+ return Collections.unmodifiableMap( mapOfParams );
}
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -56,413 +56,339 @@
/**
* @author Emmanuel Bernard
*/
-public class SearchFactoryImpl implements SearchFactoryImplementor
-{
- private static final ThreadLocal<WeakHashMap<Cfg, SearchFactoryImpl>> contexts =
- new ThreadLocal<WeakHashMap<Cfg, SearchFactoryImpl>>();
+public class SearchFactoryImpl implements SearchFactoryImplementor {
+ private static final ThreadLocal<WeakHashMap<Cfg, SearchFactoryImpl>> contexts =
+ new ThreadLocal<WeakHashMap<Cfg, SearchFactoryImpl>>();
- static
- {
- Version.touch();
- }
+ static {
+ Version.touch();
+ }
- private final Logger log = LoggerFactory.getLogger(SearchFactoryImpl.class);
+ private final Logger log = LoggerFactory.getLogger( SearchFactoryImpl.class );
- private final Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
- //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
- private final Map<DirectoryProvider, DirectoryProviderData> dirProviderData = new HashMap<DirectoryProvider, DirectoryProviderData>();
- private final Worker worker;
- private final ReaderProvider readerProvider;
- private BackendQueueProcessorFactory backendQueueProcessorFactory;
- private final Map<String, FilterDef> filterDefinitions = new HashMap<String, FilterDef>();
- private final FilterCachingStrategy filterCachingStrategy;
- private Map<String, Analyzer> analyzers;
- private final AtomicBoolean stopped = new AtomicBoolean(false);
+ private final Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entities can use the same directory provider
+ private final Map<DirectoryProvider, DirectoryProviderData> dirProviderData = new HashMap<DirectoryProvider, DirectoryProviderData>();
+ private final Worker worker;
+ private final ReaderProvider readerProvider;
+ private BackendQueueProcessorFactory backendQueueProcessorFactory;
+ private final Map<String, FilterDef> filterDefinitions = new HashMap<String, FilterDef>();
+ private final FilterCachingStrategy filterCachingStrategy;
+ private Map<String, Analyzer> analyzers;
+ private final AtomicBoolean stopped = new AtomicBoolean( false );
- /**
- * Each directory provider (index) can have its own performance settings.
- */
- private Map<DirectoryProvider, LuceneIndexingParameters> dirProviderIndexingParams =
- new HashMap<DirectoryProvider, LuceneIndexingParameters>();
- private final String indexingStrategy;
+ /**
+ * Each directory provider (index) can have its own performance settings.
+ */
+ private Map<DirectoryProvider, LuceneIndexingParameters> dirProviderIndexingParams =
+ new HashMap<DirectoryProvider, LuceneIndexingParameters>();
+ private final String indexingStrategy;
- public BackendQueueProcessorFactory getBackendQueueProcessorFactory()
- {
- return backendQueueProcessorFactory;
- }
+ public BackendQueueProcessorFactory getBackendQueueProcessorFactory() {
+ return backendQueueProcessorFactory;
+ }
- public void setBackendQueueProcessorFactory(BackendQueueProcessorFactory backendQueueProcessorFactory)
- {
- this.backendQueueProcessorFactory = backendQueueProcessorFactory;
- }
+ public void setBackendQueueProcessorFactory(BackendQueueProcessorFactory backendQueueProcessorFactory) {
+ this.backendQueueProcessorFactory = backendQueueProcessorFactory;
+ }
- @SuppressWarnings("unchecked")
- public SearchFactoryImpl(Configuration hibernateConfig)
- {
- this (new CfgImpl(hibernateConfig));
+ public SearchFactoryImpl(Configuration hibernateConfiguration) {
+ this(new CfgImpl(hibernateConfiguration));
}
- public SearchFactoryImpl(Cfg cfg)
- {
- //yuk
- ReflectionManager reflectionManager = getReflectionManager(cfg);
- this.indexingStrategy = defineIndexingStrategy(cfg); //need to be done before the document builds
- initDocumentBuilders(cfg, reflectionManager);
+ @SuppressWarnings( "unchecked" )
+ public SearchFactoryImpl(Cfg cfg) {
+ //yuk
+ ReflectionManager reflectionManager = getReflectionManager( cfg );
+ this.indexingStrategy = defineIndexingStrategy( cfg ); //need to be done before the document builds
+ initDocumentBuilders( cfg, reflectionManager );
- Set<Class> indexedClasses = documentBuilders.keySet();
- for (DocumentBuilder builder : documentBuilders.values())
- {
- builder.postInitialize(indexedClasses);
- }
- this.worker = WorkerFactory.createWorker(cfg, this);
- this.readerProvider = ReaderProviderFactory.createReaderProvider(cfg, this);
- this.filterCachingStrategy = buildFilterCachingStrategy(cfg.getProperties());
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for (DocumentBuilder builder : documentBuilders.values()) {
+ builder.postInitialize( indexedClasses );
+ }
+ this.worker = WorkerFactory.createWorker( cfg, this );
+ this.readerProvider = ReaderProviderFactory.createReaderProvider( cfg, this );
+ this.filterCachingStrategy = buildFilterCachingStrategy( cfg.getProperties() );
+ }
+ private static String defineIndexingStrategy(Cfg cfg) {
+ String indexingStrategy = cfg.getProperties().getProperty( Environment.INDEXING_STRATEGY, "event" );
+ if ( ! ("event".equals( indexingStrategy ) || "manual".equals( indexingStrategy ) ) ) {
+ throw new SearchException( Environment.INDEXING_STRATEGY + " unknown: " + indexingStrategy );
+ }
+ return indexingStrategy;
+ }
- }
+ public String getIndexingStrategy() {
+ return indexingStrategy;
+ }
- private static String defineIndexingStrategy(Cfg cfg)
- {
- String indexingStrategy = cfg.getProperties().getProperty(Environment.INDEXING_STRATEGY, "event");
- if (!("event".equals(indexingStrategy) || "manual".equals(indexingStrategy)))
- {
- throw new SearchException(Environment.INDEXING_STRATEGY + " unknown: " + indexingStrategy);
- }
- return indexingStrategy;
- }
+ public void close() {
+ if ( stopped.compareAndSet( false, true) ) {
+ try {
+ worker.close();
+ }
+ catch (Exception e) {
+ log.error( "Worker raises an exception on close()", e );
+ }
+ //TODO move to DirectoryProviderFactory for cleaner
+ for (DirectoryProvider dp : getDirectoryProviders() ) {
+ try {
+ dp.stop();
+ }
+ catch (Exception e) {
+ log.error( "DirectoryProvider raises an exception on stop() ", e );
+ }
+ }
+ }
+ }
- public String getIndexingStrategy()
- {
- return indexingStrategy;
- }
+ public void addClassToDirectoryProvider(Class clazz, DirectoryProvider<?> directoryProvider) {
+ DirectoryProviderData data = dirProviderData.get(directoryProvider);
+ if (data == null) {
+ data = new DirectoryProviderData();
+ dirProviderData.put( directoryProvider, data );
+ }
+ data.classes.add(clazz);
+ }
- public void close()
- {
- if (stopped.compareAndSet(false, true))
- {
- try
- {
- worker.close();
- }
- catch (Exception e)
- {
- log.error("Worker raises an exception on close()", e);
- }
- //TODO move to DirectoryProviderFactory for cleaner
- for (DirectoryProvider dp : getDirectoryProviders())
- {
- try
- {
- dp.stop();
- }
- catch (Exception e)
- {
- log.error("DirectoryProvider raises an exception on stop() ", e);
- }
- }
- }
- }
+ public Set<Class> getClassesInDirectoryProvider(DirectoryProvider<?> directoryProvider) {
+ return Collections.unmodifiableSet( dirProviderData.get(directoryProvider).classes );
+ }
- public void addClassToDirectoryProvider(Class clazz, DirectoryProvider<?> directoryProvider)
- {
- DirectoryProviderData data = dirProviderData.get(directoryProvider);
- if (data == null)
- {
- data = new DirectoryProviderData();
- dirProviderData.put(directoryProvider, data);
- }
- data.classes.add(clazz);
- }
+ private void bindFilterDefs(XClass mappedXClass) {
+ FullTextFilterDef defAnn = mappedXClass.getAnnotation( FullTextFilterDef.class );
+ if ( defAnn != null ) {
+ bindFilterDef( defAnn, mappedXClass );
+ }
+ FullTextFilterDefs defsAnn = mappedXClass.getAnnotation( FullTextFilterDefs.class );
+ if (defsAnn != null) {
+ for ( FullTextFilterDef def : defsAnn.value() ) {
+ bindFilterDef( def, mappedXClass );
+ }
+ }
+ }
- public Set<Class> getClassesInDirectoryProvider(DirectoryProvider<?> directoryProvider)
- {
- return Collections.unmodifiableSet(dirProviderData.get(directoryProvider).classes);
- }
+ private void bindFilterDef(FullTextFilterDef defAnn, XClass mappedXClass) {
+ if ( filterDefinitions.containsKey( defAnn.name() ) ) {
+ throw new SearchException("Multiple definition of @FullTextFilterDef.name=" + defAnn.name() + ": "
+ + mappedXClass.getName() );
+ }
+ FilterDef filterDef = new FilterDef();
+ filterDef.setImpl( defAnn.impl() );
+ filterDef.setCache( defAnn.cache() );
+ filterDef.setUseCachingWrapperFilter( defAnn.useCachingWrapperFilter() );
+ try {
+ filterDef.getImpl().newInstance();
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
+ }
+ catch (InstantiationException e) {
+ throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
+ }
+ for ( Method method : filterDef.getImpl().getMethods() ) {
+ if ( method.isAnnotationPresent( Factory.class ) ) {
+ if ( filterDef.getFactoryMethod() != null ) {
+ throw new SearchException("Multiple @Factory methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName() );
+ }
+ if ( !method.isAccessible() ) method.setAccessible( true );
+ filterDef.setFactoryMethod( method );
+ }
+ if ( method.isAnnotationPresent( Key.class ) ) {
+ if ( filterDef.getKeyMethod() != null ) {
+ throw new SearchException("Multiple @Key methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName() );
+ }
+ if ( !method.isAccessible() ) method.setAccessible( true );
+ filterDef.setKeyMethod( method );
+ }
- private void bindFilterDefs(XClass mappedXClass)
- {
- FullTextFilterDef defAnn = mappedXClass.getAnnotation(FullTextFilterDef.class);
- if (defAnn != null)
- {
- bindFilterDef(defAnn, mappedXClass);
- }
- FullTextFilterDefs defsAnn = mappedXClass.getAnnotation(FullTextFilterDefs.class);
- if (defsAnn != null)
- {
- for (FullTextFilterDef def : defsAnn.value())
- {
- bindFilterDef(def, mappedXClass);
- }
- }
- }
+ String name = method.getName();
+ if ( name.startsWith( "set" ) && method.getParameterTypes().length == 1 ) {
+ filterDef.addSetter( Introspector.decapitalize( name.substring( 3 ) ), method );
+ }
+ }
+ filterDefinitions.put( defAnn.name(), filterDef );
+ }
- private void bindFilterDef(FullTextFilterDef defAnn, XClass mappedXClass)
- {
- if (filterDefinitions.containsKey(defAnn.name()))
- {
- throw new SearchException("Multiple definition of @FullTextFilterDef.name=" + defAnn.name() + ": "
- + mappedXClass.getName());
- }
- FilterDef filterDef = new FilterDef();
- filterDef.setImpl(defAnn.impl());
- filterDef.setCache(defAnn.cache());
- try
- {
- filterDef.getImpl().newInstance();
- }
- catch (IllegalAccessException e)
- {
- throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
- }
- catch (InstantiationException e)
- {
- throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
- }
- for (Method method : filterDef.getImpl().getMethods())
- {
- if (method.isAnnotationPresent(Factory.class))
- {
- if (filterDef.getFactoryMethod() != null)
- {
- throw new SearchException("Multiple @Factory methods found" + defAnn.name() + ": "
- + filterDef.getImpl().getName() + "." + method.getName());
- }
- if (!method.isAccessible()) method.setAccessible(true);
- filterDef.setFactoryMethod(method);
- }
- if (method.isAnnotationPresent(Key.class))
- {
- if (filterDef.getKeyMethod() != null)
- {
- throw new SearchException("Multiple @Key methods found" + defAnn.name() + ": "
- + filterDef.getImpl().getName() + "." + method.getName());
- }
- if (!method.isAccessible()) method.setAccessible(true);
- filterDef.setKeyMethod(method);
- }
+ //code doesn't have to be multithreaded because SF creation is not.
+ //this is not a public API, should really only be used during the SessionFactory building
+ //FIXME this is ugly, impl.staticmethod, fix that
+ public static SearchFactoryImpl getSearchFactory(Cfg cfg) {
+ WeakHashMap<Cfg, SearchFactoryImpl> contextMap = contexts.get();
+ if ( contextMap == null ) {
+ contextMap = new WeakHashMap<Cfg, SearchFactoryImpl>( 2 );
+ contexts.set( contextMap );
+ }
+ SearchFactoryImpl searchFactory = contextMap.get( cfg );
+ if ( searchFactory == null ) {
+ searchFactory = new SearchFactoryImpl( cfg );
+ contextMap.put( cfg, searchFactory );
+ }
+ return searchFactory;
+ }
- String name = method.getName();
- if (name.startsWith("set") && method.getParameterTypes().length == 1)
- {
- filterDef.addSetter(Introspector.decapitalize(name.substring(3)), method);
- }
- }
- filterDefinitions.put(defAnn.name(), filterDef);
- }
- //code doesn't have to be multithreaded because SF creation is not.
- //this is not a public API, should really only be used during the SessionFActory building
- //FIXME this is ugly, impl.staticmethod, fix that
- public static SearchFactoryImpl getSearchFactory(Cfg cfg)
- {
- WeakHashMap<Cfg, SearchFactoryImpl> contextMap = contexts.get();
- if (contextMap == null)
- {
- contextMap = new WeakHashMap<Cfg, SearchFactoryImpl>(2);
- contexts.set(contextMap);
- }
- SearchFactoryImpl searchFactory = contextMap.get(cfg);
- if (searchFactory == null)
- {
- searchFactory = new SearchFactoryImpl(cfg);
- contextMap.put(cfg, searchFactory);
- }
- return searchFactory;
- }
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
+ public Set<DirectoryProvider> getDirectoryProviders() {
+ return this.dirProviderData.keySet();
+ }
- public Map<Class, DocumentBuilder<Object>> getDocumentBuilders()
- {
- return documentBuilders;
- }
+ public Worker getWorker() {
+ return worker;
+ }
- public Set<DirectoryProvider> getDirectoryProviders()
- {
- return this.dirProviderData.keySet();
- }
+ public void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy) {
+ DirectoryProviderData data = dirProviderData.get(provider);
+ if (data == null) {
+ data = new DirectoryProviderData();
+ dirProviderData.put( provider, data );
+ }
+ data.optimizerStrategy = optimizerStrategy;
+ }
- public Worker getWorker()
- {
- return worker;
- }
+ public void addIndexingParameters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams) {
+ dirProviderIndexingParams.put( provider, indexingParams );
+ }
- public void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy)
- {
- DirectoryProviderData data = dirProviderData.get(provider);
- if (data == null)
- {
- data = new DirectoryProviderData();
- dirProviderData.put(provider, data);
- }
- data.optimizerStrategy = optimizerStrategy;
- }
+ public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider) {
+ return dirProviderData.get( provider ).optimizerStrategy;
+ }
- public void addIndexingParameters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams)
- {
- dirProviderIndexingParams.put(provider, indexingParams);
- }
+ public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider ) {
+ return dirProviderIndexingParams.get( provider );
+ }
- public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider)
- {
- return dirProviderData.get(provider).optimizerStrategy;
- }
+ public ReaderProvider getReaderProvider() {
+ return readerProvider;
+ }
- public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider)
- {
- return dirProviderIndexingParams.get(provider);
- }
+ //not happy about having it as a helper class but I don't want cfg to be associated with the SearchFactory
+ public static ReflectionManager getReflectionManager(Cfg cfg) {
+ ReflectionManager reflectionManager;
+ try {
+ //TODO introduce a ReflectionManagerHolder interface to avoid reflection
+ //I want to avoid hard link between HAN and Validator for such a simple need
+ //reuse the existing reflectionManager one when possible
+ reflectionManager =
+ (ReflectionManager) cfg.getClass().getMethod( "getReflectionManager" ).invoke( cfg );
- public ReaderProvider getReaderProvider()
- {
- return readerProvider;
- }
+ }
+ catch (Exception e) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ return reflectionManager;
+ }
- //not happy about having it as a helper class but I don't want cfg to be associated with the SearchFactory
- public static ReflectionManager getReflectionManager(Cfg cfg)
- {
- ReflectionManager reflectionManager;
- try
- {
- //TODO introduce a ReflectionManagerHolder interface to avoid reflection
- //I want to avoid hard link between HAN and Validator for such a simple need
- //reuse the existing reflectionManager one when possible
- reflectionManager =
- (ReflectionManager) cfg.getClass().getMethod("getReflectionManager").invoke(cfg);
+ public DirectoryProvider[] getDirectoryProviders(Class entity) {
+ DocumentBuilder<Object> documentBuilder = getDocumentBuilders().get( entity );
+ return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
+ }
- }
- catch (Exception e)
- {
- reflectionManager = new JavaReflectionManager();
- }
- return reflectionManager;
- }
+ public void optimize() {
+ Set<Class> clazzs = getDocumentBuilders().keySet();
+ for (Class clazz : clazzs) {
+ optimize( clazz );
+ }
+ }
- public DirectoryProvider[] getDirectoryProviders(Class entity)
- {
- DocumentBuilder<Object> documentBuilder = getDocumentBuilders().get(entity);
- return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
- }
+ public void optimize(Class entityType) {
+ if ( ! getDocumentBuilders().containsKey( entityType ) ) {
+ throw new SearchException("Entity not indexed: " + entityType);
+ }
+ List<LuceneWork> queue = new ArrayList<LuceneWork>(1);
+ queue.add( new OptimizeLuceneWork( entityType ) );
+ getBackendQueueProcessorFactory().getProcessor( queue ).run();
+ }
- public void optimize()
- {
- Set<Class> clazzs = getDocumentBuilders().keySet();
- for (Class clazz : clazzs)
- {
- optimize(clazz);
- }
- }
+ public Analyzer getAnalyzer(String name) {
+ final Analyzer analyzer = analyzers.get( name );
+ if ( analyzer == null) throw new SearchException( "Unknown Analyzer definition: " + name);
+ return analyzer;
+ }
- public void optimize(Class entityType)
- {
- if (!getDocumentBuilders().containsKey(entityType))
- {
- throw new SearchException("Entity not indexed: " + entityType);
- }
- List<LuceneWork> queue = new ArrayList<LuceneWork>(1);
- queue.add(new OptimizeLuceneWork(entityType));
- getBackendQueueProcessorFactory().getProcessor(queue).run();
- }
+ private void initDocumentBuilders(Cfg cfg, ReflectionManager reflectionManager) {
+ InitContext context = new InitContext( cfg );
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
- public Analyzer getAnalyzer(String name)
- {
- final Analyzer analyzer = analyzers.get(name);
- if (analyzer == null) throw new SearchException("Unknown Analyzer definition: " + name);
- return analyzer;
- }
+ while ( iter.hasNext() ) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if (mappedClass != null) {
+ XClass mappedXClass = reflectionManager.toXClass(mappedClass);
+ if ( mappedXClass != null) {
+ if ( mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProviderFactory.DirectoryProviders providers = factory.createDirectoryProviders( mappedXClass, cfg, this, reflectionManager );
- private void initDocumentBuilders(Cfg cfg, ReflectionManager reflectionManager)
- {
- InitContext context = new InitContext(cfg);
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, context, providers.getProviders(), providers.getSelectionStrategy(),
+ reflectionManager
+ );
- while (iter.hasNext())
- {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if (mappedClass != null)
- {
- XClass mappedXClass = reflectionManager.toXClass(mappedClass);
- if (mappedXClass != null)
- {
- if (mappedXClass.isAnnotationPresent(Indexed.class))
- {
- DirectoryProviderFactory.DirectoryProviders providers = factory.createDirectoryProviders(mappedXClass, cfg, this, reflectionManager);
+ documentBuilders.put( mappedClass, documentBuilder );
+ }
+ bindFilterDefs(mappedXClass);
+ //TODO should analyzer def for classes at the same level???
+ }
+ }
+ }
+ analyzers = context.initLazyAnalyzers();
+ factory.startDirectoryProviders();
+ }
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
- mappedXClass, context, providers.getProviders(), providers.getSelectionStrategy(),
- reflectionManager
- );
+ private static FilterCachingStrategy buildFilterCachingStrategy(Properties properties) {
+ FilterCachingStrategy filterCachingStrategy;
+ String impl = properties.getProperty( Environment.FILTER_CACHING_STRATEGY );
+ if ( StringHelper.isEmpty( impl ) || "mru".equalsIgnoreCase( impl ) ) {
+ filterCachingStrategy = new MRUFilterCachingStrategy();
+ }
+ else {
+ try {
+ Class filterCachingStrategyClass = org.hibernate.annotations.common.util.ReflectHelper.classForName( impl, SearchFactoryImpl.class );
+ filterCachingStrategy = (FilterCachingStrategy) filterCachingStrategyClass.newInstance();
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find filterCachingStrategy class: " + impl, e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instantiate filterCachingStrategy class: " + impl, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instantiate filterCachingStrategy class: " + impl, e );
+ }
+ }
+ filterCachingStrategy.initialize( properties );
+ return filterCachingStrategy;
+ }
- documentBuilders.put(mappedClass, documentBuilder);
- }
- bindFilterDefs(mappedXClass);
- //TODO should analyzer def for classes at tyher sqme level???
- }
- }
- }
- analyzers = context.initLazyAnalyzers();
- factory.startDirectoryProviders();
- }
+ public FilterCachingStrategy getFilterCachingStrategy() {
+ return filterCachingStrategy;
+ }
- private static FilterCachingStrategy buildFilterCachingStrategy(Properties properties)
- {
- FilterCachingStrategy filterCachingStrategy;
- String impl = properties.getProperty(Environment.FILTER_CACHING_STRATEGY);
- if (StringHelper.isEmpty(impl) || "mru".equalsIgnoreCase(impl))
- {
- filterCachingStrategy = new MRUFilterCachingStrategy();
- }
- else
- {
- try
- {
- Class filterCachingStrategyClass = org.hibernate.annotations.common.util.ReflectHelper.classForName(impl, SearchFactoryImpl.class);
- filterCachingStrategy = (FilterCachingStrategy) filterCachingStrategyClass.newInstance();
- }
- catch (ClassNotFoundException e)
- {
- throw new SearchException("Unable to find filterCachingStrategy class: " + impl, e);
- }
- catch (IllegalAccessException e)
- {
- throw new SearchException("Unable to instantiate filterCachingStrategy class: " + impl, e);
- }
- catch (InstantiationException e)
- {
- throw new SearchException("Unable to instantiate filterCachingStrategy class: " + impl, e);
- }
- }
- filterCachingStrategy.initialize(properties);
- return filterCachingStrategy;
- }
+ public FilterDef getFilterDefinition(String name) {
+ return filterDefinitions.get( name );
+ }
- public FilterCachingStrategy getFilterCachingStrategy()
- {
- return filterCachingStrategy;
- }
+ private static class DirectoryProviderData {
+ public final Lock dirLock = new ReentrantLock();
+ public OptimizerStrategy optimizerStrategy;
+ public Set<Class> classes = new HashSet<Class>(2);
+ }
- public FilterDef getFilterDefinition(String name)
- {
- return filterDefinitions.get(name);
- }
+ public Lock getDirectoryProviderLock(DirectoryProvider dp) {
+ return this.dirProviderData.get( dp ).dirLock;
+ }
- private static class DirectoryProviderData
- {
- public final Lock dirLock = new ReentrantLock();
- public OptimizerStrategy optimizerStrategy;
- public Set<Class> classes = new HashSet<Class>(2);
- }
-
- public Lock getDirectoryProviderLock(DirectoryProvider dp)
- {
- return this.dirProviderData.get(dp).dirLock;
- }
-
- public void addDirectoryProvider(DirectoryProvider<?> provider)
- {
- this.dirProviderData.put(provider, new DirectoryProviderData());
- }
-
+ public void addDirectoryProvider(DirectoryProvider<?> provider) {
+ this.dirProviderData.put( provider, new DirectoryProviderData() );
+ }
+
}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/Search.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/Search.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/Search.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -9,8 +9,10 @@
* Helper class that should be used when building a FullTextEntityManager
*
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public final class Search {
+
private Search() {
}
@@ -18,7 +20,7 @@
* Build a full text capable EntityManager
* The underlying EM implementation has to be Hibernate EntityManager
*/
- public static FullTextEntityManager createFullTextEntityManager(EntityManager em) {
+ public static FullTextEntityManager getFullTextEntityManager(EntityManager em) {
if ( em instanceof FullTextEntityManagerImpl ) {
return (FullTextEntityManager) em;
}
@@ -26,4 +28,4 @@
return new FullTextEntityManagerImpl(em);
}
}
-}
\ No newline at end of file
+}
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -34,7 +34,7 @@
throw new SearchException("Trying to use Hibernate Search without an Hibernate EntityManager (no delegate)");
}
else if ( Session.class.isAssignableFrom( delegate.getClass() ) ) {
- ftSession = Search.createFullTextSession( (Session) delegate );
+ ftSession = Search.getFullTextSession( (Session) delegate );
}
else if ( EntityManager.class.isAssignableFrom( delegate.getClass() ) ) {
//Some app servers wrap the EM twice
@@ -43,7 +43,7 @@
throw new SearchException("Trying to use Hibernate Search without an Hibernate EntityManager (no delegate)");
}
else if ( Session.class.isAssignableFrom( delegate.getClass() ) ) {
- ftSession = Search.createFullTextSession( (Session) delegate );
+ ftSession = Search.getFullTextSession( (Session) delegate );
}
else {
throw new SearchException("Trying to use Hibernate Search without an Hibernate EntityManager: " + delegate.getClass() );
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/reader/CacheableMultiReader.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/reader/CacheableMultiReader.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/reader/CacheableMultiReader.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -1,4 +1,4 @@
-//$
+// $Id:$
package org.hibernate.search.reader;
import java.io.IOException;
Modified: search/branches/jboss_cache_integration/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/branches/jboss_cache_integration/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-10 10:37:16 UTC (rev 14912)
@@ -221,7 +221,7 @@
}
}
- private static String getDirectoryProviderName(XClass clazz, org.hibernate.search.cfg.Cfg cfg) {
+ private static String getDirectoryProviderName(XClass clazz, Cfg cfg) {
//yuk
ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
//get the most specialized (ie subclass > superclass) non default index name
Modified: search/branches/jboss_cache_integration/src/test/log4j.properties
===================================================================
--- search/branches/jboss_cache_integration/src/test/log4j.properties 2008-07-09 22:04:57 UTC (rev 14911)
+++ search/branches/jboss_cache_integration/src/test/log4j.properties 2008-07-10 10:37:16 UTC (rev 14912)
@@ -5,11 +5,17 @@
log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
### direct messages to file hibernate.log ###
-#log4j.appender.file=org.apache.log4j.FileAppender
-#log4j.appender.file.File=hibernate.log
-#log4j.appender.file.layout=org.apache.log4j.PatternLayout
-#log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.File=hibernate.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+### direct messages to socket - chainsaw ###
+log4j.appender.socket=org.apache.log4j.net.SocketAppender
+log4j.appender.socket.remoteHost=localhost
+log4j.appender.socket.port=4560
+log4j.appender.socket.locationInfo=true
+
### set log levels - for more verbose logging change 'info' to 'debug' ###
log4j.rootLogger=warn, stdout
@@ -45,4 +51,4 @@
#log4j.logger.org.hibernate.cfg.SettingsFactory=info
#log4j.logger.org.hibernate.cfg.AnnotationBinder=info
#log4j.logger.org.hibernate.cfg.AnnotationConfiguration=info
-#log4j.logger.org.hibernate.cfg.Ejb3Column=info
\ No newline at end of file
+#log4j.logger.org.hibernate.cfg.Ejb3Column=info
More information about the hibernate-commits
mailing list