[hibernate-commits] Hibernate SVN: r14935 - in search/trunk/src/java/org/hibernate/search: backend and 7 other directories.
hibernate-commits at lists.jboss.org
hibernate-commits at lists.jboss.org
Wed Jul 16 10:37:47 EDT 2008
Author: epbernard
Date: 2008-07-16 10:37:47 -0400 (Wed, 16 Jul 2008)
New Revision: 14935
Added:
search/trunk/src/java/org/hibernate/search/event/ContextHolder.java
Modified:
search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java
search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java
search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java
search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java
search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java
Log:
Clean up Navin's abstraction over Hibernate Core, clean style issues, apply back changes from my original fix patch
Modified: search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -1,22 +1,27 @@
package org.hibernate.search.annotations;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
import org.hibernate.search.bridge.StringBridge;
-import java.lang.annotation.*;
-
/**
- * This annotation means that document ids will be generated externally and does not need to be
- * contained within the class being indexed.
- * <p />
- * Basically, this means that classes annotated with this will NOT be scanned for {@link org.hibernate.search.annotations.DocumentId} annotated fields.
+ * Objects whose identifier is provided externally and not part of the object state
+ * should be marked with this annotation
+ * <p/>
+ * This annotation should not be used in conjunction with {@link org.hibernate.search.annotations.DocumentId}
+ *
* @author Navin Surtani - navin at surtani.org
*/
@Retention( RetentionPolicy.RUNTIME )
@Target( ElementType.TYPE )
@Documented
-public @interface ProvidedId
-{
+public @interface ProvidedId {
- String name() default "ProvidedId";
- Class<StringBridge> bridge() default StringBridge.class;
+ String name() default "providedId";
+
+ Class<StringBridge> bridge() default StringBridge.class;
}
Modified: search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -6,68 +6,56 @@
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.engine.SearchFactoryImplementor;
-import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
/**
* @author Emmanuel Bernard
*/
-public abstract class WorkerFactory
-{
+public abstract class WorkerFactory {
- private static Properties getProperties(SearchConfiguration cfg)
- {
- Properties props = cfg.getProperties();
- Properties workerProperties = new Properties();
- for (Map.Entry entry : props.entrySet())
- {
- String key = (String) entry.getKey();
- if (key.startsWith(Environment.WORKER_PREFIX))
- {
- //key.substring( Environment.WORKER_PREFIX.length() )
- workerProperties.setProperty(key, (String) entry.getValue());
- }
- }
- return workerProperties;
- }
+ private static Properties getProperties(SearchConfiguration cfg) {
+ Properties props = cfg.getProperties();
+ Properties workerProperties = new Properties();
+ for (Map.Entry entry : props.entrySet()) {
+ String key = (String) entry.getKey();
+ if ( key.startsWith( Environment.WORKER_PREFIX ) ) {
+ //key.substring( Environment.WORKER_PREFIX.length() )
+ workerProperties.setProperty( key, (String) entry.getValue() );
+ }
+ }
+ return workerProperties;
+ }
- public static Worker createWorker(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor)
- {
- Properties props = getProperties(cfg);
- String impl = props.getProperty(Environment.WORKER_SCOPE);
- Worker worker;
- if (StringHelper.isEmpty(impl))
- {
- worker = new TransactionalWorker();
- }
- else if ("transaction".equalsIgnoreCase(impl))
- {
- worker = new TransactionalWorker();
- }
- else
- {
- try
- {
- Class workerClass = ReflectHelper.classForName(impl, WorkerFactory.class);
- worker = (Worker) workerClass.newInstance();
- }
- catch (ClassNotFoundException e)
- {
- throw new SearchException("Unable to find worker class: " + impl, e);
- }
- catch (IllegalAccessException e)
- {
- throw new SearchException("Unable to instanciate worker class: " + impl, e);
- }
- catch (InstantiationException e)
- {
- throw new SearchException("Unable to instanciate worker class: " + impl, e);
- }
- }
- worker.initialize(props, searchFactoryImplementor);
- return worker;
- }
+ public static Worker createWorker(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor) {
+ Properties props = getProperties( cfg );
+ String impl = props.getProperty( Environment.WORKER_SCOPE );
+ Worker worker;
+ if ( StringHelper.isEmpty( impl ) ) {
+ worker = new TransactionalWorker();
+ }
+ else if ( "transaction".equalsIgnoreCase( impl ) ) {
+ worker = new TransactionalWorker();
+ }
+ else {
+ try {
+ Class workerClass = ReflectHelper.classForName( impl, WorkerFactory.class );
+ worker = (Worker) workerClass.newInstance();
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find worker class: " + impl, e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instanciate worker class: " + impl, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instanciate worker class: " + impl, e );
+ }
+ }
+ worker.initialize( props, searchFactoryImplementor );
+ return worker;
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -1,6 +1,8 @@
//$Id$
package org.hibernate.search.backend.impl;
+import java.util.Properties;
+
import org.hibernate.search.backend.QueueingProcessor;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
@@ -9,9 +11,6 @@
import org.hibernate.search.transaction.TransactionContext;
import org.hibernate.search.util.WeakIdentityHashMap;
-import javax.transaction.Transaction;
-import java.util.Properties;
-
/**
* Queue works per transaction.
* If out of transaction, the work is executed right away
@@ -21,55 +20,46 @@
*
* @author Emmanuel Bernard
*/
-public class TransactionalWorker implements Worker
-{
- //not a synchronized map since for a given transaction, we have not concurrent access
- protected final WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
- private QueueingProcessor queueingProcessor;
+public class TransactionalWorker implements Worker {
+	//not a synchronized map since for a given transaction, we have no concurrent access
+ protected final WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
+ private QueueingProcessor queueingProcessor;
- public void performWork(Work work, TransactionContext transactionContext)
- {
- if (transactionContext.isTxInProgress())
- {
- Object transaction = transactionContext.getTransactionIdentifier();
- PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
- synchronizationPerTransaction.get(transaction);
- if (txSync == null || txSync.isConsumed())
- {
- txSync = new PostTransactionWorkQueueSynchronization(queueingProcessor, synchronizationPerTransaction);
- transactionContext.registerSynchronization(txSync);
- synchronizationPerTransaction.put(transaction, txSync);
- }
- txSync.add(work);
- }
- else
- {
- WorkQueue queue = new WorkQueue(2); //one work can be split
- queueingProcessor.add(work, queue);
- queueingProcessor.prepareWorks(queue);
- queueingProcessor.performWorks(queue);
- }
- }
+ public void performWork(Work work, TransactionContext transactionContext) {
+ if ( transactionContext.isTransactionInProgress() ) {
+ Object transaction = transactionContext.getTransactionIdentifier();
+ PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
+ synchronizationPerTransaction.get( transaction );
+ if ( txSync == null || txSync.isConsumed() ) {
+ txSync = new PostTransactionWorkQueueSynchronization( queueingProcessor, synchronizationPerTransaction );
+ transactionContext.registerSynchronization( txSync );
+ synchronizationPerTransaction.put( transaction, txSync );
+ }
+ txSync.add( work );
+ }
+ else {
+ WorkQueue queue = new WorkQueue( 2 ); //one work can be split
+ queueingProcessor.add( work, queue );
+ queueingProcessor.prepareWorks( queue );
+ queueingProcessor.performWorks( queue );
+ }
+ }
- public void initialize(Properties props, SearchFactoryImplementor searchFactory)
- {
- this.queueingProcessor = new BatchedQueueingProcessor(searchFactory, props);
- }
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
+ this.queueingProcessor = new BatchedQueueingProcessor( searchFactory, props );
+ }
- public void close()
- {
- queueingProcessor.close();
- }
+ public void close() {
+ queueingProcessor.close();
+ }
-
-
public void flushWorks(TransactionContext transactionContext) {
- if ( transactionContext.isTxInProgress() ) {
+ if ( transactionContext.isTransactionInProgress() ) {
Object transaction = transactionContext.getTransactionIdentifier();
PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
synchronizationPerTransaction.get( transaction );
- if ( txSync != null && ! txSync.isConsumed() ) {
+ if ( txSync != null && !txSync.isConsumed() ) {
txSync.flushWorks();
}
}
Modified: search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -16,9 +16,8 @@
*
* @return iterator of indexed classes.
*/
+ Iterator<Class<?>> getClassMappings();
- Iterator<Class> getClassMappings();
-
/**
* Returns a {@link java.lang.Class} from a String parameter.
* TODO: should it be Iteratable
@@ -26,7 +25,7 @@
* @return java.lang.Class
*/
- Class getClassMapping(String name);
+ Class<?> getClassMapping(String name);
/**
* Gets a configuration property from its name
@@ -35,7 +34,6 @@
* @param propertyName - as a String.
* @return the property as a String
*/
-
String getProperty(String propertyName);
/**
@@ -49,7 +47,8 @@
/**
* Returns a reflection manager if already available in the environment
* null otherwise
- * @return ReflectionManager
+ *
+ * @return ReflectionManager
*/
ReflectionManager getReflectionManager();
Modified: search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -1,92 +1,94 @@
package org.hibernate.search.cfg;
+import java.util.Iterator;
import java.util.Properties;
-import java.util.Iterator;
+import java.util.NoSuchElementException;
-import org.hibernate.mapping.PersistentClass;
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.mapping.PersistentClass;
/**
* Search configuration implementation wrapping an Hibernate Core configuration
*
* @author Emmanuel Bernard
*/
-public class SearchConfigurationFromHibernateCore implements SearchConfiguration
-{
- private org.hibernate.cfg.Configuration cfg;
- private ReflectionManager reflectionManager;
+public class SearchConfigurationFromHibernateCore implements SearchConfiguration {
+ private org.hibernate.cfg.Configuration cfg;
+ private ReflectionManager reflectionManager;
- public SearchConfigurationFromHibernateCore(org.hibernate.cfg.Configuration cfg)
- {
- if (cfg == null) throw new NullPointerException("Configuration is null");
- this.cfg = cfg;
- }
+ public SearchConfigurationFromHibernateCore(org.hibernate.cfg.Configuration cfg) {
+ if ( cfg == null ) throw new NullPointerException( "Configuration is null" );
+ this.cfg = cfg;
+ }
- public Iterator<Class> getClassMappings()
- {
- return new ClassIterator(cfg.getClassMappings());
- }
+ public Iterator<Class<?>> getClassMappings() {
+ return new ClassIterator( cfg.getClassMappings() );
+ }
- public Class getClassMapping(String name)
- {
- return cfg.getClassMapping(name).getMappedClass();
- }
+ public Class<?> getClassMapping(String name) {
+ return cfg.getClassMapping( name ).getMappedClass();
+ }
- public String getProperty(String propertyName)
- {
- return cfg.getProperty(propertyName);
- }
+ public String getProperty(String propertyName) {
+ return cfg.getProperty( propertyName );
+ }
- public Properties getProperties()
- {
- return cfg.getProperties();
- }
+ public Properties getProperties() {
+ return cfg.getProperties();
+ }
- public ReflectionManager getReflectionManager()
- {
- if (reflectionManager == null)
- {
- try
- {
- //TODO introduce a ReflectionManagerHolder interface to avoid reflection
- //I want to avoid hard link between HAN and Validator for such a simple need
- //reuse the existing reflectionManager one when possible
- reflectionManager =
- (ReflectionManager) cfg.getClass().getMethod("getReflectionManager").invoke(cfg);
+ public ReflectionManager getReflectionManager() {
+ if ( reflectionManager == null ) {
+ try {
+ //TODO introduce a ReflectionManagerHolder interface to avoid reflection
+ //I want to avoid hard link between HAN and Validator for such a simple need
+ //reuse the existing reflectionManager one when possible
+ reflectionManager =
+ (ReflectionManager) cfg.getClass().getMethod( "getReflectionManager" ).invoke( cfg );
- }
- catch (Exception e)
- {
- reflectionManager = new JavaReflectionManager();
- }
- }
- return reflectionManager;
- }
+ }
+ catch (Exception e) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ }
+ return reflectionManager;
+ }
- private class ClassIterator implements Iterator<Class>
- {
- private Iterator hibernatePersistentClassIterator;
+ private static class ClassIterator implements Iterator<Class<?>> {
+ private Iterator hibernatePersistentClassIterator;
+ private Class<?> future;
+ private Class<?> current;
- private ClassIterator(Iterator hibernatePersistentClassIterator)
- {
- this.hibernatePersistentClassIterator = hibernatePersistentClassIterator;
- }
+ private ClassIterator(Iterator hibernatePersistentClassIterator) {
+ this.hibernatePersistentClassIterator = hibernatePersistentClassIterator;
+ }
- public boolean hasNext()
- {
- return hibernatePersistentClassIterator.hasNext();
- }
+ public boolean hasNext() {
+			//we need to read the next non null one. getMappedClass() can return null and should be ignored
+ if ( future != null) return true;
+ do {
+ if ( ! hibernatePersistentClassIterator.hasNext() ) {
+ future = null;
+ return false;
+ }
+ final PersistentClass pc = (PersistentClass) hibernatePersistentClassIterator.next();
+ future = pc.getMappedClass();
+ }
+ while ( future == null );
+ return true;
+ }
- public Class next()
- {
- PersistentClass pc = (PersistentClass) hibernatePersistentClassIterator.next();
- return pc.getMappedClass();
- }
+ public Class<?> next() {
+ //run hasNext to init the next element
+ if ( ! hasNext() ) throw new NoSuchElementException();
+ Class<?> result = future;
+ future = null;
+ return result;
+ }
- public void remove()
- {
- hibernatePersistentClassIterator.remove();
- }
- }
+ public void remove() {
+ throw new UnsupportedOperationException( "Cannot modify Hibenrate Core metadata" );
+ }
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -27,7 +27,18 @@
import org.hibernate.annotations.common.util.StringHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.search.SearchException;
-import org.hibernate.search.annotations.*;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.ClassBridges;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.ProvidedId;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.DeleteLuceneWork;
import org.hibernate.search.backend.LuceneWork;
@@ -56,959 +67,816 @@
* @author Richard Hallier
* @author Hardy Ferentschik
*/
- at SuppressWarnings("unchecked")
-public class DocumentBuilder<T>
-{
- private static final Logger log = LoggerFactory.getLogger(DocumentBuilder.class);
+ at SuppressWarnings( "unchecked" )
+public class DocumentBuilder<T> {
+ private static final Logger log = LoggerFactory.getLogger( DocumentBuilder.class );
- private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
- private final XClass beanClass;
- private final DirectoryProvider[] directoryProviders;
- private final IndexShardingStrategy shardingStrategy;
- private String idKeywordName;
- private XMember idGetter;
- private Float idBoost;
- public static final String CLASS_FIELDNAME = "_hibernate_class";
- private TwoWayFieldBridge idBridge;
- private Set<Class> mappedSubclasses = new HashSet<Class>();
- private ReflectionManager reflectionManager;
- private int level = 0;
- private int maxLevel = Integer.MAX_VALUE;
- private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
- private Similarity similarity;
- private boolean isRoot;
- //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- private boolean safeFromTupleId;
- private boolean idProvided = false;
+ private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
+ private final XClass beanClass;
+ private final DirectoryProvider[] directoryProviders;
+ private final IndexShardingStrategy shardingStrategy;
+ private String idKeywordName;
+ private XMember idGetter;
+ private Float idBoost;
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+ private TwoWayFieldBridge idBridge;
+ private Set<Class> mappedSubclasses = new HashSet<Class>();
+ private ReflectionManager reflectionManager;
+ private int level = 0;
+ private int maxLevel = Integer.MAX_VALUE;
+ private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
+ private Similarity similarity;
+ private boolean isRoot;
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
+ private boolean safeFromTupleId;
+ private boolean idProvided = false;
- public boolean isRoot()
- {
- return isRoot;
- }
+ public boolean isRoot() {
+ return isRoot;
+ }
- public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
- IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager)
- {
- this.beanClass = clazz;
- this.directoryProviders = directoryProviders;
- this.shardingStrategy = shardingStrategy;
- //FIXME get rid of it when boost is stored?
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
+ public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
+ IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
+ this.beanClass = clazz;
+ this.directoryProviders = directoryProviders;
+ this.shardingStrategy = shardingStrategy;
+ //FIXME get rid of it when boost is stored?
+ this.reflectionManager = reflectionManager;
+ this.similarity = context.getDefaultSimilarity();
- if (clazz == null) throw new AssertionFailure("Unable to build a DocumentBuilder with a null class");
- rootPropertiesMetadata.boost = getBoost(clazz);
- rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
- Set<XClass> processedClasses = new HashSet<XClass>();
- processedClasses.add(clazz);
- initializeMembers(clazz, rootPropertiesMetadata, true, "", processedClasses, context);
- //processedClasses.remove( clazz ); for the sake of completness
- this.analyzer.setGlobalAnalyzer(rootPropertiesMetadata.analyzer);
- if (idKeywordName == null)
- {
- //check if there is a ProvidedId first.
+ if ( clazz == null ) throw new AssertionFailure( "Unable to build a DocumentBuilder with a null class" );
+ rootPropertiesMetadata.boost = getBoost( clazz );
+ rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
+ Set<XClass> processedClasses = new HashSet<XClass>();
+ processedClasses.add( clazz );
+ initializeMembers( clazz, rootPropertiesMetadata, true, "", processedClasses, context );
+		//processedClasses.remove( clazz ); for the sake of completeness
+ this.analyzer.setGlobalAnalyzer( rootPropertiesMetadata.analyzer );
+ if ( idKeywordName == null ) {
+ // if no DocumentId then check if we have a ProvidedId instead
+ ProvidedId provided = clazz.getAnnotation( org.hibernate.search.annotations.ProvidedId.class );
+ if ( provided == null ) throw new SearchException( "No document id in: " + clazz.getName() );
- ProvidedId provided = clazz.getAnnotation(org.hibernate.search.annotations.ProvidedId.class);
- if (provided == null) throw new SearchException("No document id in: " + clazz.getName());
- idBridge = getProvidedIdBridge();
- idKeywordName = provided.name();
+ //FIXME navssurtani use something similar to BridgeFactory.extractType(ClassBridge)
+ idBridge = getProvidedIdBridge();
+ idKeywordName = provided.name();
- }
- //if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails on most database
- //a TwoWayString2FieldBridgeAdaptor is never a composite id
- safeFromTupleId = TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom(idBridge.getClass());
- }
+ }
+ //if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails on most database
+ //a TwoWayString2FieldBridgeAdaptor is never a composite id
+ safeFromTupleId = TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom( idBridge.getClass() );
+ }
- private TwoWayFieldBridge getProvidedIdBridge()
- {
- return new TwoWayString2FieldBridgeAdaptor(new StringBridge());
- }
+ private TwoWayFieldBridge getProvidedIdBridge() {
+ return new TwoWayString2FieldBridgeAdaptor( new StringBridge() );
+ }
+ private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
+ org.hibernate.search.annotations.Analyzer analyzerAnn =
+ annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
+ return getAnalyzer( analyzerAnn, context );
+ }
- private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context)
- {
- org.hibernate.search.annotations.Analyzer analyzerAnn =
- annotatedElement.getAnnotation(org.hibernate.search.annotations.Analyzer.class);
- return getAnalyzer(analyzerAnn, context);
- }
+ private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context) {
+ Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
+ if ( analyzerClass == void.class ) {
+ String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
+ if ( StringHelper.isEmpty( definition ) ) {
+ return null;
+ }
+ else {
- private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context)
- {
- Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
- if (analyzerClass == void.class)
- {
- String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
- if (StringHelper.isEmpty(definition))
- {
- return null;
- }
- else
- {
+ return context.buildLazyAnalyzer( definition );
+ }
+ }
+ else {
+ try {
+ return (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new SearchException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
+ );
+ }
+ catch (Exception e) {
+ throw new SearchException( "Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e );
+ }
+ }
+ }
- return context.buildLazyAnalyzer(definition);
- }
- }
- else
- {
- try
- {
- return (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e)
- {
- throw new SearchException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
- );
- }
- catch (Exception e)
- {
- throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e);
- }
- }
- }
+ private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
+ Set<XClass> processedClasses, InitContext context) {
+ List<XClass> hierarchy = new ArrayList<XClass>();
+ for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass()) {
+ hierarchy.add( currClass );
+ }
+ Class similarityClass = null;
+ for (int index = hierarchy.size() - 1; index >= 0; index--) {
+ XClass currClass = hierarchy.get( index );
+ /**
+ * Override the default analyzer for the properties if the class hold one
+ * That's the reason we go down the hierarchy
+ */
+ Analyzer analyzer = getAnalyzer( currClass, context );
- private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
- Set<XClass> processedClasses, InitContext context)
- {
- List<XClass> hierarchy = new ArrayList<XClass>();
- for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass())
- {
- hierarchy.add(currClass);
- }
- Class similarityClass = null;
- for (int index = hierarchy.size() - 1; index >= 0; index--)
- {
- XClass currClass = hierarchy.get(index);
- /**
- * Override the default analyzer for the properties if the class hold one
- * That's the reason we go down the hierarchy
- */
- Analyzer analyzer = getAnalyzer(currClass, context);
+ if ( analyzer != null ) {
+ propertiesMetadata.analyzer = analyzer;
+ }
+ getAnalyzerDefs( currClass, context );
+ // Check for any ClassBridges annotation.
+ ClassBridges classBridgesAnn = currClass.getAnnotation( ClassBridges.class );
+ if ( classBridgesAnn != null ) {
+ ClassBridge[] cbs = classBridgesAnn.value();
+ for (ClassBridge cb : cbs) {
+ bindClassAnnotation( prefix, propertiesMetadata, cb, context );
+ }
+ }
- if (analyzer != null)
- {
- propertiesMetadata.analyzer = analyzer;
- }
- getAnalyzerDefs(currClass, context);
- // Check for any ClassBridges annotation.
- ClassBridges classBridgesAnn = currClass.getAnnotation(ClassBridges.class);
- if (classBridgesAnn != null)
- {
- ClassBridge[] cbs = classBridgesAnn.value();
- for (ClassBridge cb : cbs)
- {
- bindClassAnnotation(prefix, propertiesMetadata, cb, context);
- }
- }
+ // Check for any ClassBridge style of annotations.
+ ClassBridge classBridgeAnn = currClass.getAnnotation( ClassBridge.class );
+ if ( classBridgeAnn != null ) {
+ bindClassAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
+ }
- // Check for any ClassBridge style of annotations.
- ClassBridge classBridgeAnn = currClass.getAnnotation(ClassBridge.class);
- if (classBridgeAnn != null)
- {
- bindClassAnnotation(prefix, propertiesMetadata, classBridgeAnn, context);
- }
+ //Get similarity
+ //TODO: similarity form @IndexedEmbedded are not taken care of. Exception??
+ if ( isRoot ) {
+ org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation( org.hibernate.search.annotations.Similarity.class );
+ if ( similarityAnn != null ) {
+ if ( similarityClass != null ) {
+ throw new SearchException( "Multiple Similarities defined in the same class hierarchy: " + beanClass.getName() );
+ }
+ similarityClass = similarityAnn.impl();
+ }
+ }
- //Get similarity
- //TODO: similarity form @IndexedEmbedded are not taken care of. Exception??
- if (isRoot)
- {
- org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation(org.hibernate.search.annotations.Similarity.class);
- if (similarityAnn != null)
- {
- if (similarityClass != null)
- {
- throw new SearchException("Multiple Similarities defined in the same class hierarchy: " + beanClass.getName());
- }
- similarityClass = similarityAnn.impl();
- }
- }
+ //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
+ // so indexing a non property does not make sense
+ List<XProperty> methods = currClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
+ for (XProperty method : methods) {
+ initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses, context );
+ }
- //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
- // so indexing a non property does not make sense
- List<XProperty> methods = currClass.getDeclaredProperties(XClass.ACCESS_PROPERTY);
- for (XProperty method : methods)
- {
- initializeMember(method, propertiesMetadata, isRoot, prefix, processedClasses, context);
- }
+ List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD );
+ for (XProperty field : fields) {
+ initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses, context );
+ }
+ }
+ if ( isRoot && similarityClass != null ) {
+ try {
+ similarity = (Similarity) similarityClass.newInstance();
+ }
+ catch (Exception e) {
+ log.error( "Exception attempting to instantiate Similarity '{}' set for {}",
+ similarityClass.getName(), beanClass.getName() );
+ }
+ }
+ }
- List<XProperty> fields = currClass.getDeclaredProperties(XClass.ACCESS_FIELD);
- for (XProperty field : fields)
- {
- initializeMember(field, propertiesMetadata, isRoot, prefix, processedClasses, context);
- }
- }
- if (isRoot && similarityClass != null)
- {
- try
- {
- similarity = (Similarity) similarityClass.newInstance();
- }
- catch (Exception e)
- {
- log.error("Exception attempting to instantiate Similarity '{}' set for {}",
- similarityClass.getName(), beanClass.getName());
- }
- }
- }
+ private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context) {
+ AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
+ if ( defs != null ) {
+ for (AnalyzerDef def : defs.value()) {
+ context.addAnalyzerDef( def );
+ }
+ }
+ AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
+ context.addAnalyzerDef( def );
+ }
- private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context)
- {
- AnalyzerDefs defs = annotatedElement.getAnnotation(AnalyzerDefs.class);
- if (defs != null)
- {
- for (AnalyzerDef def : defs.value())
- {
- context.addAnalyzerDef(def);
- }
- }
- AnalyzerDef def = annotatedElement.getAnnotation(AnalyzerDef.class);
- context.addAnalyzerDef(def);
- }
+ public String getIdentifierName() {
+ return idGetter.getName();
+ }
- public String getIdentifierName()
- {
- return idGetter.getName();
- }
+ public Similarity getSimilarity() {
+ return similarity;
+ }
- public Similarity getSimilarity()
- {
- return similarity;
- }
+ private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
+ String prefix, Set<XClass> processedClasses, InitContext context) {
- private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
- String prefix, Set<XClass> processedClasses, InitContext context)
- {
+ DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ if ( isRoot ) {
+ if ( idKeywordName != null ) {
+ throw new AssertionFailure( "Two document id assigned: "
+ + idKeywordName + " and " + BinderHelper.getAttributeName( member, documentIdAnn.name() ) );
+ }
+ idKeywordName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
+ FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager );
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = (TwoWayFieldBridge) fieldBridge;
+ }
+ else {
+ throw new SearchException(
+ "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName() );
+ }
+ idBoost = getBoost( member );
+ setAccessible( member );
+ idGetter = member;
+ }
+ else {
+ //components should index their document id
+ setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
+ propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member, reflectionManager ) );
+ // property > entity analyzer (no field analyzer)
+ Analyzer analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+ {
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Field.class );
+ if ( fieldAnn != null ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ {
+ org.hibernate.search.annotations.Fields fieldsAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Fields.class );
+ if ( fieldsAnn != null ) {
+ for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value()) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ }
+ getAnalyzerDefs( member, context );
- DocumentId documentIdAnn = member.getAnnotation(DocumentId.class);
- if (documentIdAnn != null)
- {
- if (isRoot)
- {
- if (idKeywordName != null)
- {
- throw new AssertionFailure("Two document id assigned: "
- + idKeywordName + " and " + BinderHelper.getAttributeName(member, documentIdAnn.name()));
- }
- idKeywordName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
- FieldBridge fieldBridge = BridgeFactory.guessType(null, member, reflectionManager);
- if (fieldBridge instanceof TwoWayFieldBridge)
- {
- idBridge = (TwoWayFieldBridge) fieldBridge;
- }
- else
- {
- throw new SearchException(
- "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName());
- }
- idBoost = getBoost(member);
- setAccessible(member);
- idGetter = member;
- }
- else
- {
- //component should index their document id
- setAccessible(member);
- propertiesMetadata.fieldGetters.add(member);
- String fieldName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
- propertiesMetadata.fieldNames.add(fieldName);
- propertiesMetadata.fieldStore.add(getStore(Store.YES));
- propertiesMetadata.fieldIndex.add(getIndex(Index.UN_TOKENIZED));
- propertiesMetadata.fieldTermVectors.add(getTermVector(TermVector.NO));
- propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(null, member, reflectionManager));
- // property > entity analyzer (no field analyzer)
- Analyzer analyzer = getAnalyzer(member, context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
- }
- {
- org.hibernate.search.annotations.Field fieldAnn =
- member.getAnnotation(org.hibernate.search.annotations.Field.class);
- if (fieldAnn != null)
- {
- bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
- }
- }
- {
- org.hibernate.search.annotations.Fields fieldsAnn =
- member.getAnnotation(org.hibernate.search.annotations.Fields.class);
- if (fieldsAnn != null)
- {
- for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value())
- {
- bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
- }
- }
- }
- getAnalyzerDefs(member, context);
+ IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
+ if ( embeddedAnn != null ) {
+ int oldMaxLevel = maxLevel;
+ int potentialLevel = embeddedAnn.depth() + level;
+ if ( potentialLevel < 0 ) {
+ potentialLevel = Integer.MAX_VALUE;
+ }
+ maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
+ level++;
- IndexedEmbedded embeddedAnn = member.getAnnotation(IndexedEmbedded.class);
- if (embeddedAnn != null)
- {
- int oldMaxLevel = maxLevel;
- int potentialLevel = embeddedAnn.depth() + level;
- if (potentialLevel < 0)
- {
- potentialLevel = Integer.MAX_VALUE;
- }
- maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
- level++;
+ XClass elementClass;
+ if ( void.class == embeddedAnn.targetElement() ) {
+ elementClass = member.getElementClass();
+ }
+ else {
+ elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
+ }
+ if ( maxLevel == Integer.MAX_VALUE //infinite
+ && processedClasses.contains( elementClass ) ) {
+ throw new SearchException(
+ "Circular reference. Duplicate use of "
+ + elementClass.getName()
+ + " in root entity " + beanClass.getName()
+ + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
+ );
+ }
+ if ( level <= maxLevel ) {
+ processedClasses.add( elementClass ); //push
- XClass elementClass;
- if (void.class == embeddedAnn.targetElement())
- {
- elementClass = member.getElementClass();
- }
- else
- {
- elementClass = reflectionManager.toXClass(embeddedAnn.targetElement());
- }
- if (maxLevel == Integer.MAX_VALUE //infinite
- && processedClasses.contains(elementClass))
- {
- throw new SearchException(
- "Circular reference. Duplicate use of "
- + elementClass.getName()
- + " in root entity " + beanClass.getName()
- + "#" + buildEmbeddedPrefix(prefix, embeddedAnn, member)
- );
- }
- if (level <= maxLevel)
- {
- processedClasses.add(elementClass); //push
+ setAccessible( member );
+ propertiesMetadata.embeddedGetters.add( member );
+ PropertiesMetadata metadata = new PropertiesMetadata();
+ propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
+ metadata.boost = getBoost( member );
+ //property > entity analyzer
+ Analyzer analyzer = getAnalyzer( member, context );
+ metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ initializeMembers( elementClass, metadata, false, localPrefix, processedClasses, context );
+ /**
+ * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
+ */
+ if ( member.isArray() ) {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
+ }
+ else if ( member.isCollection() ) {
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum subclasses etc etc??
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION );
+ }
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
+ }
- setAccessible(member);
- propertiesMetadata.embeddedGetters.add(member);
- PropertiesMetadata metadata = new PropertiesMetadata();
- propertiesMetadata.embeddedPropertiesMetadata.add(metadata);
- metadata.boost = getBoost(member);
- //property > entity analyzer
- Analyzer analyzer = getAnalyzer(member, context);
- metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
- String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
- initializeMembers(elementClass, metadata, false, localPrefix, processedClasses, context);
- /**
- * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
- */
- if (member.isArray())
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.ARRAY);
- }
- else if (member.isCollection())
- {
- if (Map.class.equals(member.getCollectionClass()))
- {
- //hum subclasses etc etc??
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.MAP);
- }
- else
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.COLLECTION);
- }
- }
- else
- {
- propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.OBJECT);
- }
+ processedClasses.remove( elementClass ); //pop
+ }
+ else if ( log.isTraceEnabled() ) {
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ log.trace( "depth reached, ignoring {}", localPrefix );
+ }
- processedClasses.remove(elementClass); //pop
- }
- else if (log.isTraceEnabled())
- {
- String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
- log.trace("depth reached, ignoring {}", localPrefix);
- }
+ level--;
+ maxLevel = oldMaxLevel; //set back to the old max level
+ }
- level--;
- maxLevel = oldMaxLevel; //set back the the old max level
- }
+ ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
+ if ( containedAnn != null ) {
+ setAccessible( member );
+ propertiesMetadata.containedInGetters.add( member );
+ }
+ }
- ContainedIn containedAnn = member.getAnnotation(ContainedIn.class);
- if (containedAnn != null)
- {
- setAccessible(member);
- propertiesMetadata.containedInGetters.add(member);
- }
- }
+ private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context) {
+ //FIXME name should be prefixed
+ String fieldName = prefix + ann.name();
+ propertiesMetadata.classNames.add( fieldName );
+ propertiesMetadata.classStores.add( getStore( ann.store() ) );
+ propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
+ propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
+ propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
+ propertiesMetadata.classBoosts.add( ann.boost().value() );
- private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context)
- {
- //FIXME name should be prefixed
- String fieldName = prefix + ann.name();
- propertiesMetadata.classNames.add(fieldName);
- propertiesMetadata.classStores.add(getStore(ann.store()));
- propertiesMetadata.classIndexes.add(getIndex(ann.index()));
- propertiesMetadata.classTermVectors.add(getTermVector(ann.termVector()));
- propertiesMetadata.classBridges.add(BridgeFactory.extractType(ann));
- propertiesMetadata.classBoosts.add(ann.boost().value());
+ Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analyzer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
- Analyzer analyzer = getAnalyzer(ann.analyzer(), context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analyzer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
+ private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context) {
+ setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
+ propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member, reflectionManager ) );
- private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context)
- {
- setAccessible(member);
- propertiesMetadata.fieldGetters.add(member);
- String fieldName = prefix + BinderHelper.getAttributeName(member, fieldAnn.name());
- propertiesMetadata.fieldNames.add(fieldName);
- propertiesMetadata.fieldStore.add(getStore(fieldAnn.store()));
- propertiesMetadata.fieldIndex.add(getIndex(fieldAnn.index()));
- propertiesMetadata.fieldTermVectors.add(getTermVector(fieldAnn.termVector()));
- propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(fieldAnn, member, reflectionManager));
+ // Field > property > entity analyzer
+ Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
+ if ( analyzer == null ) analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
+ if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
- // Field > property > entity analyzer
- Analyzer analyzer = getAnalyzer(fieldAnn.analyzer(), context);
- if (analyzer == null) analyzer = getAnalyzer(member, context);
- if (analyzer == null) analyzer = propertiesMetadata.analyzer;
- if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
- this.analyzer.addScopedAnalyzer(fieldName, analyzer);
- }
+ private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member) {
+ String localPrefix = prefix;
+ if ( ".".equals( embeddedAnn.prefix() ) ) {
+ //default to property name
+ localPrefix += member.getName() + '.';
+ }
+ else {
+ localPrefix += embeddedAnn.prefix();
+ }
+ return localPrefix;
+ }
- private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member)
- {
- String localPrefix = prefix;
- if (".".equals(embeddedAnn.prefix()))
- {
- //default to property name
- localPrefix += member.getName() + '.';
- }
- else
- {
- localPrefix += embeddedAnn.prefix();
- }
- return localPrefix;
- }
+ private Field.Store getStore(Store store) {
+ switch ( store ) {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure( "Unexpected Store: " + store );
+ }
+ }
- private Field.Store getStore(Store store)
- {
- switch (store)
- {
- case NO:
- return Field.Store.NO;
- case YES:
- return Field.Store.YES;
- case COMPRESS:
- return Field.Store.COMPRESS;
- default:
- throw new AssertionFailure("Unexpected Store: " + store);
- }
- }
+ private Field.TermVector getTermVector(TermVector vector) {
+ switch ( vector ) {
+ case NO:
+ return Field.TermVector.NO;
+ case YES:
+ return Field.TermVector.YES;
+ case WITH_OFFSETS:
+ return Field.TermVector.WITH_OFFSETS;
+ case WITH_POSITIONS:
+ return Field.TermVector.WITH_POSITIONS;
+ case WITH_POSITION_OFFSETS:
+ return Field.TermVector.WITH_POSITIONS_OFFSETS;
+ default:
+ throw new AssertionFailure( "Unexpected TermVector: " + vector );
+ }
+ }
- private Field.TermVector getTermVector(TermVector vector)
- {
- switch (vector)
- {
- case NO:
- return Field.TermVector.NO;
- case YES:
- return Field.TermVector.YES;
- case WITH_OFFSETS:
- return Field.TermVector.WITH_OFFSETS;
- case WITH_POSITIONS:
- return Field.TermVector.WITH_POSITIONS;
- case WITH_POSITION_OFFSETS:
- return Field.TermVector.WITH_POSITIONS_OFFSETS;
- default:
- throw new AssertionFailure("Unexpected TermVector: " + vector);
- }
- }
+ private Field.Index getIndex(Index index) {
+ switch ( index ) {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.TOKENIZED;
+ case UN_TOKENIZED:
+ return Field.Index.UN_TOKENIZED;
+ default:
+ throw new AssertionFailure( "Unexpected Index: " + index );
+ }
+ }
- private Field.Index getIndex(Index index)
- {
- switch (index)
- {
- case NO:
- return Field.Index.NO;
- case NO_NORMS:
- return Field.Index.NO_NORMS;
- case TOKENIZED:
- return Field.Index.TOKENIZED;
- case UN_TOKENIZED:
- return Field.Index.UN_TOKENIZED;
- default:
- throw new AssertionFailure("Unexpected Index: " + index);
- }
- }
+ private Float getBoost(XAnnotatedElement element) {
+ if ( element == null ) return null;
+ Boost boost = element.getAnnotation( Boost.class );
+ return boost != null ?
+ boost.value() :
+ null;
+ }
- private Float getBoost(XAnnotatedElement element)
- {
- if (element == null) return null;
- Boost boost = element.getAnnotation(Boost.class);
- return boost != null ?
- boost.value() :
- null;
- }
+ private Object getMemberValue(Object bean, XMember getter) {
+ Object value;
+ try {
+ value = getter.invoke( bean );
+ }
+ catch (Exception e) {
+ throw new IllegalStateException( "Could not get property value", e );
+ }
+ return value;
+ }
- private Object getMemberValue(Object bean, XMember getter)
- {
- Object value;
- try
- {
- value = getter.invoke(bean);
- }
- catch (Exception e)
- {
- throw new IllegalStateException("Could not get property value", e);
- }
- return value;
- }
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
+ //TODO with the caller loop we are in an n^2: optimize it using a HashMap for work recognition
+ for (LuceneWork luceneWork : queue) {
+ //any work on the same entity should be ignored
+ if ( luceneWork.getEntityClass() == entityClass
+ ) {
+ Serializable currentId = luceneWork.getId();
+ if ( currentId != null && currentId.equals( id ) ) { //find a way to use Type.equals(x,y)
+ return;
+ }
+ //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
+ }
- //TODO could we use T instead of EntityClass?
- public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor)
- {
- //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
- for (LuceneWork luceneWork : queue)
- {
- //any work on the same entity should be ignored
- if (luceneWork.getEntityClass() == entityClass
- )
- {
- Serializable currentId = luceneWork.getId();
- if (currentId != null && currentId.equals(id))
- { //find a way to use Type.equals(x,y)
- return;
- }
- //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
- }
+ }
+ boolean searchForContainers = false;
+ String idInString = idBridge.objectToString( id );
+ if ( workType == WorkType.ADD ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ searchForContainers = true;
+ }
+ else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ }
+ else if ( workType == WorkType.PURGE_ALL ) {
+ queue.add( new PurgeAllLuceneWork( entityClass ) );
+ }
+ else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
+ Document doc = getDocument( entity, id );
+ /**
+ * even with Lucene 2.1, use of indexWriter to update is not an option
+ * We can only delete by term, and the index doesn't have a term that
+ * uniquely identifies the entry.
+ * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
+ * double file opening.
+ */
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ searchForContainers = true;
+ }
+ else if ( workType == WorkType.INDEX ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ LuceneWork work = new AddLuceneWork( id, idInString, entityClass, doc );
+ work.setBatch( true );
+ queue.add( work );
+ searchForContainers = true;
+ }
- }
- boolean searchForContainers = false;
- String idInString = idBridge.objectToString(id);
- if (workType == WorkType.ADD)
- {
- Document doc = getDocument(entity, id);
- queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
- searchForContainers = true;
- }
- else if (workType == WorkType.DELETE || workType == WorkType.PURGE)
- {
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- }
- else if (workType == WorkType.PURGE_ALL)
- {
- queue.add(new PurgeAllLuceneWork(entityClass));
- }
- else if (workType == WorkType.UPDATE || workType == WorkType.COLLECTION)
- {
- Document doc = getDocument(entity, id);
- /**
- * even with Lucene 2.1, use of indexWriter to update is not an option
- * We can only delete by term, and the index doesn't have a term that
- * uniquely identify the entry.
- * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
- * double file opening.
- */
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
- searchForContainers = true;
- }
- else if (workType == WorkType.INDEX)
- {
- Document doc = getDocument(entity, id);
- queue.add(new DeleteLuceneWork(id, idInString, entityClass));
- LuceneWork work = new AddLuceneWork(id, idInString, entityClass, doc);
- work.setBatch(true);
- queue.add(work);
- searchForContainers = true;
- }
+ else {
+ throw new AssertionFailure( "Unknown WorkType: " + workType );
+ }
- else
- {
- throw new AssertionFailure("Unknown WorkType: " + workType);
- }
+ /**
+ * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
+ * have to be updated)
+ * When the internal object is changed, we apply the {Add|Update}Work on containedIns
+ */
+ if ( searchForContainers ) {
+ processContainedIn( entity, queue, rootPropertiesMetadata, searchFactoryImplementor );
+ }
+ }
- /**
- * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
- * have to be updated)
- * When the internal object is changed, we apply the {Add|Update}Work on containedIns
- */
- if (searchForContainers)
- {
- processContainedIn(entity, queue, rootPropertiesMetadata, searchFactoryImplementor);
- }
- }
+ private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
+ for (int i = 0; i < metadata.containedInGetters.size(); i++) {
+ XMember member = metadata.containedInGetters.get( i );
+ Object value = getMemberValue( instance, member );
+ if ( value == null ) continue;
- private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor)
- {
- for (int i = 0; i < metadata.containedInGetters.size(); i++)
- {
- XMember member = metadata.containedInGetters.get(i);
- Object value = getMemberValue(instance, member);
- if (value == null) continue;
+ if ( member.isArray() ) {
+ for (Object arrayValue : (Object[]) value) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass( arrayValue );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( arrayValue, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ else if ( member.isCollection() ) {
+ Collection collection;
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum
+ collection = ( (Map) value ).values();
+ }
+ else {
+ collection = (Collection) value;
+ }
+ for (Object collectionValue : collection) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass( collectionValue );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( collectionValue, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ else {
+ Class valueClass = Hibernate.getClass( value );
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
+ if ( builder == null ) continue;
+ processContainedInValue( value, queue, valueClass, builder, searchFactoryImplementor );
+ }
+ }
+ //an embedded cannot have a useful @ContainedIn (no shared reference)
+ //do not walk through them
+ }
- if (member.isArray())
- {
- for (Object arrayValue : (Object[]) value)
- {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass(arrayValue);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(arrayValue, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- else if (member.isCollection())
- {
- Collection collection;
- if (Map.class.equals(member.getCollectionClass()))
- {
- //hum
- collection = ((Map) value).values();
- }
- else
- {
- collection = (Collection) value;
- }
- for (Object collectionValue : collection)
- {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass(collectionValue);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(collectionValue, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- else
- {
- Class valueClass = Hibernate.getClass(value);
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
- if (builder == null) continue;
- processContainedInValue(value, queue, valueClass, builder, searchFactoryImplementor);
- }
- }
- //an embedded cannot have a useful @ContainedIn (no shared reference)
- //do not walk through them
- }
+ private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
+ DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor) {
+ Serializable id = (Serializable) builder.getMemberValue( value, builder.idGetter );
+ builder.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
+ }
- private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
- DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor)
- {
- Serializable id = (Serializable) builder.getMemberValue(value, builder.idGetter);
- builder.addWorkToQueue(valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor);
- }
+ public Document getDocument(T instance, Serializable id) {
+ Document doc = new Document();
+ XClass instanceClass = reflectionManager.toXClass( Hibernate.getClass( instance ) );
+ if ( rootPropertiesMetadata.boost != null ) {
+ doc.setBoost( rootPropertiesMetadata.boost );
+ }
+ {
+ Field classField =
+ new Field( CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO );
+ doc.add( classField );
+ LuceneOptions luceneOptions = new LuceneOptions( Field.Store.YES,
+ Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost );
+ idBridge.set( idKeywordName, id, doc, luceneOptions );
+ }
+ buildDocumentFields( instance, doc, rootPropertiesMetadata );
+ return doc;
+ }
- public Document getDocument(T instance, Serializable id)
- {
- Document doc = new Document();
- XClass instanceClass = reflectionManager.toXClass(Hibernate.getClass(instance));
- if (rootPropertiesMetadata.boost != null)
- {
- doc.setBoost(rootPropertiesMetadata.boost);
- }
- {
- Field classField =
- new Field(CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO);
- doc.add(classField);
- LuceneOptions luceneOptions = new LuceneOptions(Field.Store.YES,
- Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost);
- idBridge.set(idKeywordName, id, doc, luceneOptions);
- }
- buildDocumentFields(instance, doc, rootPropertiesMetadata);
- return doc;
- }
+ private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata) {
+ if ( instance == null ) return;
+ //needed for field access: I cannot work in the proxied version
+ Object unproxiedInstance = unproxy( instance );
+ for (int i = 0; i < propertiesMetadata.classBridges.size(); i++) {
+ FieldBridge fb = propertiesMetadata.classBridges.get( i );
+ fb.set( propertiesMetadata.classNames.get( i ), unproxiedInstance,
+ doc, propertiesMetadata.getClassLuceneOptions( i ) );
+ }
+ for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++) {
+ XMember member = propertiesMetadata.fieldGetters.get( i );
+ Object value = getMemberValue( unproxiedInstance, member );
+ propertiesMetadata.fieldBridges.get( i ).set(
+ propertiesMetadata.fieldNames.get( i ), value, doc,
+ propertiesMetadata.getFieldLuceneOptions( i, getBoost( member ) ) );
+ }
+ for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++) {
+ XMember member = propertiesMetadata.embeddedGetters.get( i );
+ Object value = getMemberValue( unproxiedInstance, member );
+ //TODO handle boost at embedded level: already stored in propertiesMetadata.boost
- private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata)
- {
- if (instance == null) return;
- //needed for field access: I cannot work in the proxied version
- Object unproxiedInstance = unproxy(instance);
- for (int i = 0; i < propertiesMetadata.classBridges.size(); i++)
- {
- FieldBridge fb = propertiesMetadata.classBridges.get(i);
- fb.set(propertiesMetadata.classNames.get(i), unproxiedInstance,
- doc, propertiesMetadata.getClassLuceneOptions(i));
- }
- for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++)
- {
- XMember member = propertiesMetadata.fieldGetters.get(i);
- Object value = getMemberValue(unproxiedInstance, member);
- propertiesMetadata.fieldBridges.get(i).set(
- propertiesMetadata.fieldNames.get(i), value, doc,
- propertiesMetadata.getFieldLuceneOptions(i, getBoost(member)));
- }
- for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++)
- {
- XMember member = propertiesMetadata.embeddedGetters.get(i);
- Object value = getMemberValue(unproxiedInstance, member);
- //TODO handle boost at embedded level: already stored in propertiesMedatada.boost
+ if ( value == null ) continue;
+ PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get( i );
+ switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
+ case ARRAY:
+ for (Object arrayValue : (Object[]) value) {
+ buildDocumentFields( arrayValue, doc, embeddedMetadata );
+ }
+ break;
+ case COLLECTION:
+ for (Object collectionValue : (Collection) value) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case MAP:
+ for (Object collectionValue : ( (Map) value ).values()) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case OBJECT:
+ buildDocumentFields( value, doc, embeddedMetadata );
+ break;
+ default:
+ throw new AssertionFailure( "Unknown embedded container: "
+ + propertiesMetadata.embeddedContainers.get( i ) );
+ }
+ }
+ }
- if (value == null) continue;
- PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get(i);
- switch (propertiesMetadata.embeddedContainers.get(i))
- {
- case ARRAY:
- for (Object arrayValue : (Object[]) value)
- {
- buildDocumentFields(arrayValue, doc, embeddedMetadata);
- }
- break;
- case COLLECTION:
- for (Object collectionValue : (Collection) value)
- {
- buildDocumentFields(collectionValue, doc, embeddedMetadata);
- }
- break;
- case MAP:
- for (Object collectionValue : ((Map) value).values())
- {
- buildDocumentFields(collectionValue, doc, embeddedMetadata);
- }
- break;
- case OBJECT:
- buildDocumentFields(value, doc, embeddedMetadata);
- break;
- default:
- throw new AssertionFailure("Unknown embedded container: "
- + propertiesMetadata.embeddedContainers.get(i));
- }
- }
- }
+ private Object unproxy(Object value) {
+ //FIXME this service should be part of Core?
+ if ( value instanceof HibernateProxy ) {
+ // .getImplementation() initializes the data by side effect
+ value = ( (HibernateProxy) value ).getHibernateLazyInitializer()
+ .getImplementation();
+ }
+ return value;
+ }
- private Object unproxy(Object value)
- {
- //FIXME this service should be part of Core?
- if (value instanceof HibernateProxy)
- {
- // .getImplementation() initializes the data by side effect
- value = ((HibernateProxy) value).getHibernateLazyInitializer()
- .getImplementation();
- }
- return value;
- }
+ public Term getTerm(Serializable id) {
+ if ( idProvided ) {
+ return new Term( idKeywordName, (String) id );
+ }
- public Term getTerm(Serializable id)
- {
- if (idProvided)
- {
- return new Term(idKeywordName, (String) id);
- }
+ return new Term( idKeywordName, idBridge.objectToString( id ) );
+ }
- return new Term(idKeywordName, idBridge.objectToString(id));
- }
+ public DirectoryProvider[] getDirectoryProviders() {
+ return directoryProviders;
+ }
- public DirectoryProvider[] getDirectoryProviders()
- {
- return directoryProviders;
- }
+ public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
+ return shardingStrategy;
+ }
- public IndexShardingStrategy getDirectoryProviderSelectionStrategy()
- {
- return shardingStrategy;
- }
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
- public Analyzer getAnalyzer()
- {
- return analyzer;
- }
+ private static void setAccessible(XMember member) {
+ if ( !Modifier.isPublic( member.getModifiers() ) ) {
+ member.setAccessible( true );
+ }
+ }
- private static void setAccessible(XMember member)
- {
- if (!Modifier.isPublic(member.getModifiers()))
- {
- member.setAccessible(true);
- }
- }
+ public TwoWayFieldBridge getIdBridge() {
+ return idBridge;
+ }
- public TwoWayFieldBridge getIdBridge()
- {
- return idBridge;
- }
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
- public String getIdKeywordName()
- {
- return idKeywordName;
- }
+ public static Class getDocumentClass(Document document) {
+ String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
+ try {
+ return ReflectHelper.classForName( className );
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to load indexed class: " + className, e );
+ }
+ }
- public static Class getDocumentClass(Document document)
- {
- String className = document.get(DocumentBuilder.CLASS_FIELDNAME);
- try
- {
- return ReflectHelper.classForName(className);
- }
- catch (ClassNotFoundException e)
- {
- throw new SearchException("Unable to load indexed class: " + className, e);
- }
- }
+ public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document) {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
+ if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
+ }
- public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document)
- {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
- if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
- return (Serializable) builder.getIdBridge().get(builder.getIdKeywordName(), document);
- }
+ public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields) {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
+ if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ final int fieldNbr = fields.length;
+ Object[] result = new Object[fieldNbr];
- public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields)
- {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
- if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
- final int fieldNbr = fields.length;
- Object[] result = new Object[fieldNbr];
+ if ( builder.idKeywordName != null ) {
+ populateResult( builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document );
+ }
- if (builder.idKeywordName != null)
- {
- populateResult(builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document);
- }
+ final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
+ processFieldsForProjection( metadata, fields, result, document );
+ return result;
+ }
- final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
- processFieldsForProjection(metadata, fields, result, document);
- return result;
- }
+ private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document) {
+ final int nbrOfEntityFields = metadata.fieldNames.size();
+ for (int index = 0; index < nbrOfEntityFields; index++) {
+ populateResult( metadata.fieldNames.get( index ),
+ metadata.fieldBridges.get( index ),
+ metadata.fieldStore.get( index ),
+ fields,
+ result,
+ document
+ );
+ }
+ final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
+ for (int index = 0; index < nbrOfEmbeddedObjects; index++) {
+ //there is nothing we can do for collections
+ if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT ) {
+ processFieldsForProjection( metadata.embeddedPropertiesMetadata.get( index ), fields, result, document );
+ }
+ }
+ }
- private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document)
- {
- final int nbrFoEntityFields = metadata.fieldNames.size();
- for (int index = 0; index < nbrFoEntityFields; index++)
- {
- populateResult(metadata.fieldNames.get(index),
- metadata.fieldBridges.get(index),
- metadata.fieldStore.get(index),
- fields,
- result,
- document
- );
- }
- final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
- for (int index = 0; index < nbrOfEmbeddedObjects; index++)
- {
- //there is nothing we can do for collections
- if (metadata.embeddedContainers.get(index) == PropertiesMetadata.Container.OBJECT)
- {
- processFieldsForProjection(metadata.embeddedPropertiesMetadata.get(index), fields, result, document);
- }
- }
- }
+ private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
+ String[] fields, Object[] result, Document document) {
+ int matchingPosition = getFieldPosition( fields, fieldName );
+ if ( matchingPosition != -1 ) {
+ //TODO make use of an isTwoWay() method
+ if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom( fieldBridge.getClass() ) ) {
+ result[matchingPosition] = ( (TwoWayFieldBridge) fieldBridge ).get( fieldName, document );
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Field {} projected as {}", fieldName, result[matchingPosition] );
+ }
+ }
+ else {
+ if ( store == Field.Store.NO ) {
+ throw new SearchException( "Projecting an unstored field: " + fieldName );
+ }
+ else {
+ throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass() );
+ }
+ }
+ }
+ }
- private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
- String[] fields, Object[] result, Document document)
- {
- int matchingPosition = getFieldPosition(fields, fieldName);
- if (matchingPosition != -1)
- {
- //TODO make use of an isTwoWay() method
- if (store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom(fieldBridge.getClass()))
- {
- result[matchingPosition] = ((TwoWayFieldBridge) fieldBridge).get(fieldName, document);
- if (log.isTraceEnabled())
- {
- log.trace("Field {} projected as {}", fieldName, result[matchingPosition]);
- }
- }
- else
- {
- if (store == Field.Store.NO)
- {
- throw new SearchException("Projecting an unstored field: " + fieldName);
- }
- else
- {
- throw new SearchException("FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass());
- }
- }
- }
- }
+ private static int getFieldPosition(String[] fields, String fieldName) {
+ int fieldNbr = fields.length;
+ for (int index = 0; index < fieldNbr; index++) {
+ if ( fieldName.equals( fields[index] ) ) return index;
+ }
+ return -1;
+ }
- private static int getFieldPosition(String[] fields, String fieldName)
- {
- int fieldNbr = fields.length;
- for (int index = 0; index < fieldNbr; index++)
- {
- if (fieldName.equals(fields[index])) return index;
- }
- return -1;
- }
+ public void postInitialize(Set<Class> indexedClasses) {
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass( beanClass );
+ Set<Class> tempMappedSubclasses = new HashSet<Class>();
+ //together with the caller this creates an O(n^2) pass, but I think it's still faster than creating the superclass hierarchy for each class
+ for (Class currentClass : indexedClasses) {
+ if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add( currentClass );
+ }
+ this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
+ Class superClass = plainClass.getSuperclass();
+ this.isRoot = true;
+ while ( superClass != null ) {
+ if ( indexedClasses.contains( superClass ) ) {
+ this.isRoot = false;
+ break;
+ }
+ superClass = superClass.getSuperclass();
+ }
+ }
- public void postInitialize(Set<Class> indexedClasses)
- {
- //this method does not requires synchronization
- Class plainClass = reflectionManager.toClass(beanClass);
- Set<Class> tempMappedSubclasses = new HashSet<Class>();
- //together with the caller this creates a o(2), but I think it's still faster than create the up hierarchy for each class
- for (Class currentClass : indexedClasses)
- {
- if (plainClass.isAssignableFrom(currentClass)) tempMappedSubclasses.add(currentClass);
- }
- this.mappedSubclasses = Collections.unmodifiableSet(tempMappedSubclasses);
- Class superClass = plainClass.getSuperclass();
- this.isRoot = true;
- while (superClass != null)
- {
- if (indexedClasses.contains(superClass))
- {
- this.isRoot = false;
- break;
- }
- superClass = superClass.getSuperclass();
- }
- }
+ public Set<Class> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
- public Set<Class> getMappedSubclasses()
- {
- return mappedSubclasses;
- }
+ /**
+ * Make sure to return false if there is a risk of composite id
+ * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
+ */
+ public boolean isSafeFromTupleId() {
+ return safeFromTupleId;
+ }
- /**
- * Make sure to return false if there is a risk of composite id
- * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- */
- public boolean isSafeFromTupleId()
- {
- return safeFromTupleId;
- }
+ /**
+ * Wrapper class containing all the meta data extracted out of the entities.
+ */
+ private static class PropertiesMetadata {
+ public Float boost;
+ public Analyzer analyzer;
+ public final List<String> fieldNames = new ArrayList<String>();
+ public final List<XMember> fieldGetters = new ArrayList<XMember>();
+ public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
+ public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+ public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
+ public final List<XMember> embeddedGetters = new ArrayList<XMember>();
+ public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
+ public final List<Container> embeddedContainers = new ArrayList<Container>();
+ public final List<XMember> containedInGetters = new ArrayList<XMember>();
+ public final List<String> classNames = new ArrayList<String>();
+ public final List<Field.Store> classStores = new ArrayList<Field.Store>();
+ public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
+ public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
+ public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
+ public final List<Float> classBoosts = new ArrayList<Float>();
- /**
- * Wrapper class containing all the meta data extracted out of the entities.
- */
- private static class PropertiesMetadata
- {
- public Float boost;
- public Analyzer analyzer;
- public final List<String> fieldNames = new ArrayList<String>();
- public final List<XMember> fieldGetters = new ArrayList<XMember>();
- public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
- public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
- public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
- public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
- public final List<XMember> embeddedGetters = new ArrayList<XMember>();
- public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
- public final List<Container> embeddedContainers = new ArrayList<Container>();
- public final List<XMember> containedInGetters = new ArrayList<XMember>();
- public final List<String> classNames = new ArrayList<String>();
- public final List<Field.Store> classStores = new ArrayList<Field.Store>();
- public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
- public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
- public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
- public final List<Float> classBoosts = new ArrayList<Float>();
+ public enum Container {
+ OBJECT,
+ COLLECTION,
+ MAP,
+ ARRAY
+ }
- public enum Container
- {
- OBJECT,
- COLLECTION,
- MAP,
- ARRAY
- }
+ private LuceneOptions getClassLuceneOptions(int i) {
+ LuceneOptions options = new LuceneOptions( classStores.get( i ),
+ classIndexes.get( i ), classTermVectors.get( i ), classBoosts.get( i ) );
+ return options;
+ }
- private LuceneOptions getClassLuceneOptions(int i)
- {
- LuceneOptions options = new LuceneOptions(classStores.get(i),
- classIndexes.get(i), classTermVectors.get(i), classBoosts.get(i));
- return options;
- }
-
- private LuceneOptions getFieldLuceneOptions(int i, Float boost)
- {
- LuceneOptions options = new LuceneOptions(fieldStore.get(i),
- fieldIndex.get(i), fieldTermVectors.get(i), boost);
- return options;
- }
- }
+ private LuceneOptions getFieldLuceneOptions(int i, Float boost) {
+ LuceneOptions options = new LuceneOptions( fieldStore.get( i ),
+ fieldIndex.get( i ), fieldTermVectors.get( i ), boost );
+ return options;
+ }
+ }
}
Added: search/trunk/src/java/org/hibernate/search/event/ContextHolder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/event/ContextHolder.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/event/ContextHolder.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -0,0 +1,34 @@
+package org.hibernate.search.event;
+
+import java.util.WeakHashMap;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
+import org.hibernate.search.impl.SearchFactoryImpl;
+
+/**
+ * Holds already built SearchFactory per Hibernate Configuration object
+ * concurrent threads do not share this information
+ *
+ * @author Emmanuel Bernard
+ */
+public class ContextHolder {
+ private static final ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>> contexts =
+ new ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>>();
+
+ //code doesn't have to be multithreaded because SF creation is not.
+ //this is not a public API, should really only be used during the SessionFactory building
+ public static SearchFactoryImpl getOrBuildSearchFactory(Configuration cfg) {
+ WeakHashMap<Configuration, SearchFactoryImpl> contextMap = contexts.get();
+ if ( contextMap == null ) {
+ contextMap = new WeakHashMap<Configuration, SearchFactoryImpl>( 2 );
+ contexts.set( contextMap );
+ }
+ SearchFactoryImpl searchFactory = contextMap.get( cfg );
+ if ( searchFactory == null ) {
+ searchFactory = new SearchFactoryImpl( new SearchConfigurationFromHibernateCore( cfg ) );
+ contextMap.put( cfg, searchFactory );
+ }
+ return searchFactory;
+ }
+}
Modified: search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -7,6 +7,7 @@
import org.hibernate.engine.EntityEntry;
import org.hibernate.event.AbstractCollectionEvent;
import org.hibernate.event.AbstractEvent;
+import org.hibernate.event.Destructible;
import org.hibernate.event.Initializable;
import org.hibernate.event.PostCollectionRecreateEvent;
import org.hibernate.event.PostCollectionRecreateEventListener;
@@ -20,15 +21,14 @@
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.event.Destructible;
-import org.hibernate.search.backend.WorkType;
import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.transaction.EventSourceTransactionContext;
-import org.hibernate.search.cfg.SearchConfiguration;
-import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -42,150 +42,116 @@
*/
//TODO work on sharing the same indexWriters and readers across a single post operation...
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
-@SuppressWarnings("serial")
+@SuppressWarnings( "serial" )
public class FullTextIndexEventListener implements PostDeleteEventListener,
- PostInsertEventListener, PostUpdateEventListener,
- PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
- PostCollectionUpdateEventListener, Initializable, Destructible
-{
+ PostInsertEventListener, PostUpdateEventListener,
+ PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener, Initializable, Destructible {
- private static final Logger log = LoggerFactory.getLogger(FullTextIndexEventListener.class);
+ private static final Logger log = LoggerFactory.getLogger( FullTextIndexEventListener.class );
- protected boolean used;
- protected SearchFactoryImplementor searchFactoryImplementor;
+ protected boolean used;
+ protected SearchFactoryImplementor searchFactoryImplementor;
- /**
- * Method that uses the SearchConfiguration interface as opposed to directly with Hibernate Core. This was edited
- * for the JBoss Cache integration.
- *
- * @param searchConfiguration - so that it calls the same methods on the interface.
- */
+ /**
+ * Initialize method called by Hibernate Core when the SessionFactory starts
+ */
- public void initialize(SearchConfiguration searchConfiguration)
- {
+ public void initialize(Configuration cfg) {
+ searchFactoryImplementor = ContextHolder.getOrBuildSearchFactory( cfg );
+ String indexingStrategy = searchFactoryImplementor.getIndexingStrategy();
+ if ( "event".equals( indexingStrategy ) ) {
+ used = searchFactoryImplementor.getDocumentBuilders().size() != 0;
+ }
+ else if ( "manual".equals( indexingStrategy ) ) {
+ used = false;
+ }
+ }
- searchFactoryImplementor = SearchFactoryImpl.getSearchFactory(searchConfiguration);
- String indexingStrategy = searchFactoryImplementor.getIndexingStrategy();
- if ("event".equals(indexingStrategy))
- {
- used = searchFactoryImplementor.getDocumentBuilders().size() != 0;
- }
- else if ("manual".equals(indexingStrategy))
- {
- used = false;
- }
- }
+ public SearchFactoryImplementor getSearchFactoryImplementor() {
+ return searchFactoryImplementor;
+ }
- public void initialize(Configuration cfg)
- {
- SearchConfiguration searchConfig = new SearchConfigurationFromHibernateCore(cfg);
- initialize(searchConfig);
+ public void onPostDelete(PostDeleteEvent event) {
+ if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( event.getEntity().getClass() ) ) {
+ processWork( event.getEntity(), event.getId(), WorkType.DELETE, event );
+ }
+ }
- }
+ public void onPostInsert(PostInsertEvent event) {
+ if ( used ) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entity.getClass() );
+ //not strictly necessary but a small optimization
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ processWork( entity, id, WorkType.ADD, event );
+ }
+ }
+ }
- public SearchFactoryImplementor getSearchFactoryImplementor()
- {
- return searchFactoryImplementor;
- }
+ public void onPostUpdate(PostUpdateEvent event) {
+ if ( used ) {
+ final Object entity = event.getEntity();
+ //not strictly necessary but a small optimization
+ DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entity.getClass() );
+ if ( builder != null ) {
+ Serializable id = event.getId();
+ processWork( entity, id, WorkType.UPDATE, event );
+ }
+ }
+ }
- public void onPostDelete(PostDeleteEvent event)
- {
- if (used && searchFactoryImplementor.getDocumentBuilders().containsKey(event.getEntity().getClass()))
- {
- processWork(event.getEntity(), event.getId(), WorkType.DELETE, event);
- }
- }
+ protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
+ Work work = new Work( entity, id, workType );
+ final EventSourceTransactionContext transactionContext = new EventSourceTransactionContext( event.getSession() );
+ searchFactoryImplementor.getWorker().performWork( work, transactionContext );
+ }
- public void onPostInsert(PostInsertEvent event)
- {
- if (used)
- {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get(entity.getClass());
- //not strictly necessary but a small optimization
- if (builder != null)
- {
- Serializable id = event.getId();
- processWork(entity, id, WorkType.ADD, event);
- }
- }
- }
+ public void cleanup() {
+ searchFactoryImplementor.close();
+ }
- public void onPostUpdate(PostUpdateEvent event)
- {
- if (used)
- {
- final Object entity = event.getEntity();
- //not strictly necessary but a small optimization
- DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get(entity.getClass());
- if (builder != null)
- {
- Serializable id = event.getId();
- processWork(entity, id, WorkType.UPDATE, event);
- }
- }
- }
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
+ processCollectionEvent( event );
+ }
- protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event)
- {
- Work work = new Work(entity, id, workType);
- searchFactoryImplementor.getWorker().performWork(work, new EventSourceTransactionContext(event.getSession()));
- }
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
+ processCollectionEvent( event );
+ }
- public void cleanup()
- {
- searchFactoryImplementor.close();
- }
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
+ processCollectionEvent( event );
+ }
- public void onPostRecreateCollection(PostCollectionRecreateEvent event)
- {
- processCollectionEvent(event);
- }
+ protected void processCollectionEvent(AbstractCollectionEvent event) {
+ Object entity = event.getAffectedOwnerOrNull();
+ if ( entity == null ) {
+ //Hibernate cannot determine the owner every single time, especially when detached objects are involved
+ // or property-ref is used
+ //Should log really but we don't know if we're interested in this collection for indexing
+ return;
+ }
+ if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( entity.getClass() ) ) {
+ Serializable id = getId( entity, event );
+ if ( id == null ) {
+ log.warn(
+ "Unable to reindex entity on collection change, id cannot be extracted: {}",
+ event.getAffectedOwnerEntityName()
+ );
+ return;
+ }
+ processWork( entity, id, WorkType.COLLECTION, event );
+ }
+ }
- public void onPostRemoveCollection(PostCollectionRemoveEvent event)
- {
- processCollectionEvent(event);
- }
-
- public void onPostUpdateCollection(PostCollectionUpdateEvent event)
- {
- processCollectionEvent(event);
- }
-
- protected void processCollectionEvent(AbstractCollectionEvent event)
- {
- Object entity = event.getAffectedOwnerOrNull();
- if (entity == null)
- {
- //Hibernate cannot determine every single time the owner especially in case detached objects are involved
- // or property-ref is used
- //Should log really but we don't know if we're interested in this collection for indexing
- return;
- }
- if (used && searchFactoryImplementor.getDocumentBuilders().containsKey(entity.getClass()))
- {
- Serializable id = getId(entity, event);
- if (id == null)
- {
- log.warn(
- "Unable to reindex entity on collection change, id cannot be extracted: {}",
- event.getAffectedOwnerEntityName()
- );
- return;
- }
- processWork(entity, id, WorkType.COLLECTION, event);
- }
- }
-
- private Serializable getId(Object entity, AbstractCollectionEvent event)
- {
- Serializable id = event.getAffectedOwnerIdOrNull();
- if (id == null)
- {
- //most likely this recovery is unnecessary since Hibernate Core probably try that
- EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry(entity);
- id = entityEntry == null ? null : entityEntry.getId();
- }
- return id;
- }
+ private Serializable getId(Object entity, AbstractCollectionEvent event) {
+ Serializable id = event.getAffectedOwnerIdOrNull();
+ if ( id == null ) {
+ //most likely this recovery is unnecessary since Hibernate Core probably tries that
+ EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( entity );
+ id = entityEntry == null ? null : entityEntry.getId();
+ }
+ return id;
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -43,13 +43,13 @@
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.SearchFactory;
-import org.hibernate.search.transaction.TransactionContext;
-import org.hibernate.search.transaction.EventSourceTransactionContext;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.query.FullTextQueryImpl;
+import org.hibernate.search.transaction.EventSourceTransactionContext;
+import org.hibernate.search.transaction.TransactionContext;
import org.hibernate.search.util.ContextHelper;
import org.hibernate.stat.SessionStatistics;
import org.hibernate.type.Type;
@@ -61,17 +61,17 @@
* @author John Griffin
* @author Hardy Ferentschik
*/
-@SuppressWarnings({"serial", "unchecked"})
+@SuppressWarnings( { "serial", "unchecked" } )
public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
private final Session session;
private final SessionImplementor sessionImplementor;
private transient SearchFactoryImplementor searchFactory;
- private final TransactionContext transactionContext;
+ private final TransactionContext transactionContext;
- public FullTextSessionImpl(org.hibernate.Session session) {
+ public FullTextSessionImpl(org.hibernate.Session session) {
this.session = (Session) session;
- this.transactionContext = new EventSourceTransactionContext((EventSource) session);
+ this.transactionContext = new EventSourceTransactionContext( (EventSource) session );
this.sessionImplementor = (SessionImplementor) session;
}
@@ -82,7 +82,7 @@
* @param entities must be immutable for the lifetime of the query object
*/
public FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
- return new FullTextQueryImpl( luceneQuery, entities, sessionImplementor, new ParameterMetadata(null, null) );
+ return new FullTextQueryImpl( luceneQuery, entities, sessionImplementor, new ParameterMetadata( null, null ) );
}
/**
@@ -97,7 +97,7 @@
public void flushToIndexes() {
SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
- searchFactoryImplementor.getWorker().flushWorks(transactionContext);
+ searchFactoryImplementor.getWorker().flushWorks( transactionContext );
}
/**
@@ -105,7 +105,6 @@
*
* @param entityType
* @param id
- *
* @throws IllegalArgumentException if entityType is null or not an @Indexed entity type
*/
public void purge(Class entityType, Serializable id) {
@@ -126,7 +125,7 @@
else {
type = WorkType.PURGE;
}
- Work work = new Work(entityType, id, type);
+ Work work = new Work( entityType, id, type );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
}
@@ -139,7 +138,7 @@
* @throws IllegalArgumentException if entity is null or not an @Indexed entity
*/
public void index(Object entity) {
- if (entity == null) throw new IllegalArgumentException("Entity to index should not be null");;
+ if ( entity == null ) throw new IllegalArgumentException( "Entity to index should not be null" );
Class clazz = Hibernate.getClass( entity );
//TODO cache that at the FTSession level
SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
@@ -149,7 +149,7 @@
throw new IllegalArgumentException( "Entity to index not an @Indexed entity: " + entity.getClass().getName() );
}
Serializable id = session.getIdentifier( entity );
- Work work = new Work(entity, id, WorkType.INDEX);
+ Work work = new Work( entity, id, WorkType.INDEX );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
//TODO
@@ -162,14 +162,14 @@
}
public SearchFactory getSearchFactory() {
- if (searchFactory == null) {
+ if ( searchFactory == null ) {
searchFactory = ContextHelper.getSearchFactory( session );
}
return searchFactory;
}
private SearchFactoryImplementor getSearchFactoryImplementor() {
- if (searchFactory == null) {
+ if ( searchFactory == null ) {
searchFactory = ContextHelper.getSearchFactory( session );
}
return searchFactory;
@@ -410,7 +410,7 @@
}
public List list(String query, QueryParameters queryParameters) throws HibernateException {
- return sessionImplementor.list( query, queryParameters );
+ return sessionImplementor.list( query, queryParameters );
}
public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException {
Modified: search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -56,7 +56,6 @@
/**
* @author Emmanuel Bernard
*/
- at SuppressWarnings("unchecked")
public class SearchFactoryImpl implements SearchFactoryImplementor {
private static final ThreadLocal<WeakHashMap<SearchConfiguration, SearchFactoryImpl>> contexts =
new ThreadLocal<WeakHashMap<SearchConfiguration, SearchFactoryImpl>>();
@@ -95,10 +94,11 @@
this.backendQueueProcessorFactory = backendQueueProcessorFactory;
}
- @SuppressWarnings( "unchecked" )
public SearchFactoryImpl(SearchConfiguration cfg) {
- //yuk
- ReflectionManager reflectionManager = getReflectionManager( cfg );
+ ReflectionManager reflectionManager = cfg.getReflectionManager();
+ if ( reflectionManager == null ) {
+ reflectionManager = new JavaReflectionManager();
+ }
this.indexingStrategy = defineIndexingStrategy( cfg ); //need to be done before the document builds
initDocumentBuilders( cfg, reflectionManager );
@@ -215,24 +215,7 @@
filterDefinitions.put( defAnn.name(), filterDef );
}
- //code doesn't have to be multithreaded because SF creation is not.
- //this is not a public API, should really only be used during the SessionFActory building
- //FIXME this is ugly, impl.staticmethod, fix that
- public static SearchFactoryImpl getSearchFactory(SearchConfiguration cfg) {
- WeakHashMap<SearchConfiguration, SearchFactoryImpl> contextMap = contexts.get();
- if ( contextMap == null ) {
- contextMap = new WeakHashMap<SearchConfiguration, SearchFactoryImpl>( 2 );
- contexts.set( contextMap );
- }
- SearchFactoryImpl searchFactory = contextMap.get( cfg );
- if ( searchFactory == null ) {
- searchFactory = new SearchFactoryImpl( cfg );
- contextMap.put( cfg, searchFactory );
- }
- return searchFactory;
- }
-
public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
return documentBuilders;
}
@@ -270,23 +253,6 @@
return readerProvider;
}
- //not happy about having it as a helper class but I don't want cfg to be associated with the SearchFactory
- public static ReflectionManager getReflectionManager(SearchConfiguration cfg) {
- ReflectionManager reflectionManager;
- try {
- //TODO introduce a ReflectionManagerHolder interface to avoid reflection
- //I want to avoid hard link between HAN and Validator for such a simple need
- //reuse the existing reflectionManager one when possible
- reflectionManager =
- (ReflectionManager) cfg.getClass().getMethod( "getReflectionManager" ).invoke( cfg );
-
- }
- catch (Exception e) {
- reflectionManager = new JavaReflectionManager();
- }
- return reflectionManager;
- }
-
public DirectoryProvider[] getDirectoryProviders(Class entity) {
DocumentBuilder<Object> documentBuilder = getDocumentBuilders().get( entity );
return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
@@ -316,7 +282,7 @@
private void initDocumentBuilders(SearchConfiguration cfg, ReflectionManager reflectionManager) {
InitContext context = new InitContext( cfg );
- Iterator<Class> iter = cfg.getClassMappings();
+ Iterator<Class<?>> iter = cfg.getClassMappings();
DirectoryProviderFactory factory = new DirectoryProviderFactory();
while ( iter.hasNext() ) {
Modified: search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -7,14 +7,14 @@
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
import org.hibernate.search.SearchException;
-import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.LuceneIndexingParameters;
import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
import org.hibernate.search.backend.configuration.MaskedProperty;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.engine.SearchFactoryImplementor;
-import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.store.optimization.IncrementalOptimizerStrategy;
import org.hibernate.search.store.optimization.NoOpOptimizerStrategy;
import org.hibernate.search.store.optimization.OptimizerStrategy;
@@ -42,272 +42,232 @@
* @author Hardy Ferentschik
* @author Sanne Grinovero
*/
-public class DirectoryProviderFactory
-{
+public class DirectoryProviderFactory {
- private final List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
- private static final String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
+ private final List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
+ private static final String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
- private static final String SHARDING_STRATEGY = "sharding_strategy";
- private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
+ private static final String SHARDING_STRATEGY = "sharding_strategy";
+ private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
- public DirectoryProviders createDirectoryProviders(XClass entity, SearchConfiguration cfg,
- SearchFactoryImplementor searchFactoryImplementor,
- ReflectionManager reflectionManager)
- {
- //get properties
- String directoryProviderName = getDirectoryProviderName(entity, cfg);
- Properties[] indexProps = getDirectoryProperties(cfg, directoryProviderName);
+ public DirectoryProviders createDirectoryProviders(XClass entity, SearchConfiguration cfg,
+ SearchFactoryImplementor searchFactoryImplementor,
+ ReflectionManager reflectionManager) {
+ //get properties
+ String directoryProviderName = getDirectoryProviderName( entity, cfg );
+ Properties[] indexProps = getDirectoryProperties( cfg, directoryProviderName );
- //set up the directories
- int nbrOfProviders = indexProps.length;
- DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
- for (int index = 0; index < nbrOfProviders; index++)
- {
- String providerName = nbrOfProviders > 1 ?
- directoryProviderName + "." + index :
- directoryProviderName;
- providers[index] = createDirectoryProvider(providerName, indexProps[index],
- reflectionManager.toClass(entity), searchFactoryImplementor);
- }
+ //set up the directories
+ int nbrOfProviders = indexProps.length;
+ DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
+ for (int index = 0; index < nbrOfProviders; index++) {
+ String providerName = nbrOfProviders > 1 ?
+ directoryProviderName + "." + index :
+ directoryProviderName;
+ providers[index] = createDirectoryProvider( providerName, indexProps[index],
+ reflectionManager.toClass( entity ), searchFactoryImplementor );
+ }
- //define sharding strategy
- IndexShardingStrategy shardingStrategy;
- //any indexProperty will do, the indexProps[0] surely exists.
- String shardingStrategyName = indexProps[0].getProperty(SHARDING_STRATEGY);
- if (shardingStrategyName == null)
- {
- if (indexProps.length == 1)
- {
- shardingStrategy = new NotShardedStrategy();
- }
- else
- {
- shardingStrategy = new IdHashShardingStrategy();
- }
- }
- else
- {
- try
- {
- Class shardigStrategyClass = ReflectHelper.classForName(shardingStrategyName, this.getClass());
- shardingStrategy = (IndexShardingStrategy) shardigStrategyClass.newInstance();
- }
- catch (ClassNotFoundException e)
- {
- throw new SearchException("Unable to find ShardingStrategy class " + shardingStrategyName + " for " + directoryProviderName, e);
- }
- catch (IllegalAccessException e)
- {
- throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
- + " Be sure to have a no-arg constructor", e);
- }
- catch (InstantiationException e)
- {
- throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
- + " Be sure to have a no-arg constructor", e);
- }
- catch (ClassCastException e)
- {
- throw new SearchException("ShardingStrategy class does not implements DirecotryProviderShardingStrategy: "
- + shardingStrategyName, e);
- }
- }
- shardingStrategy.initialize(
- new MaskedProperty(indexProps[0], SHARDING_STRATEGY), providers);
- return new DirectoryProviders(shardingStrategy, providers);
- }
+ //define sharding strategy
+ IndexShardingStrategy shardingStrategy;
+ //any indexProperty will do, the indexProps[0] surely exists.
+ String shardingStrategyName = indexProps[0].getProperty( SHARDING_STRATEGY );
+ if ( shardingStrategyName == null ) {
+ if ( indexProps.length == 1 ) {
+ shardingStrategy = new NotShardedStrategy();
+ }
+ else {
+ shardingStrategy = new IdHashShardingStrategy();
+ }
+ }
+ else {
+ try {
+ Class shardigStrategyClass = ReflectHelper.classForName( shardingStrategyName, this.getClass() );
+ shardingStrategy = (IndexShardingStrategy) shardigStrategyClass.newInstance();
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find ShardingStrategy class " + shardingStrategyName + " for " + directoryProviderName, e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to create instance of ShardingStrategy class " + shardingStrategyName
+ + " Be sure to have a no-arg constructor", e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to create instance of ShardingStrategy class " + shardingStrategyName
+ + " Be sure to have a no-arg constructor", e );
+ }
+ catch (ClassCastException e) {
+ throw new SearchException( "ShardingStrategy class does not implements DirecotryProviderShardingStrategy: "
+ + shardingStrategyName, e );
+ }
+ }
+ shardingStrategy.initialize(
+ new MaskedProperty( indexProps[0], SHARDING_STRATEGY ), providers );
+ return new DirectoryProviders( shardingStrategy, providers );
+ }
- public void startDirectoryProviders()
- {
- for (DirectoryProvider provider : providers)
- {
- provider.start();
- }
- }
+ public void startDirectoryProviders() {
+ for (DirectoryProvider provider : providers) {
+ provider.start();
+ }
+ }
- private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps,
- Class entity, SearchFactoryImplementor searchFactoryImplementor)
- {
- String className = indexProps.getProperty("directory_provider");
- if (StringHelper.isEmpty(className))
- {
- className = DEFAULT_DIRECTORY_PROVIDER;
- }
- DirectoryProvider<?> provider;
- try
- {
- @SuppressWarnings("unchecked")
- Class<DirectoryProvider> directoryClass = ReflectHelper.classForName(
- className, DirectoryProviderFactory.class
- );
- provider = directoryClass.newInstance();
- }
- catch (Exception e)
- {
- throw new SearchException("Unable to instantiate directory provider: " + className, e);
- }
- try
- {
- provider.initialize(directoryProviderName, indexProps, searchFactoryImplementor);
- }
- catch (Exception e)
- {
- throw new SearchException("Unable to initialize: " + directoryProviderName, e);
- }
- int index = providers.indexOf(provider);
- if (index != -1)
- {
- //share the same Directory provider for the same underlying store
- final DirectoryProvider<?> directoryProvider = providers.get(index);
- searchFactoryImplementor.addClassToDirectoryProvider(entity, directoryProvider);
- return directoryProvider;
- }
- else
- {
- configureOptimizerStrategy(searchFactoryImplementor, indexProps, provider);
- configureIndexingParameters(searchFactoryImplementor, indexProps, provider);
- providers.add(provider);
- searchFactoryImplementor.addClassToDirectoryProvider(entity, provider);
- if (!searchFactoryImplementor.getDirectoryProviders().contains(provider))
- {
- searchFactoryImplementor.addDirectoryProvider(provider);
- }
- return provider;
- }
- }
+ private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps,
+ Class entity, SearchFactoryImplementor searchFactoryImplementor) {
+ String className = indexProps.getProperty( "directory_provider" );
+ if ( StringHelper.isEmpty( className ) ) {
+ className = DEFAULT_DIRECTORY_PROVIDER;
+ }
+ DirectoryProvider<?> provider;
+ try {
+ @SuppressWarnings( "unchecked" )
+ Class<DirectoryProvider> directoryClass = ReflectHelper.classForName(
+ className, DirectoryProviderFactory.class
+ );
+ provider = directoryClass.newInstance();
+ }
+ catch (Exception e) {
+ throw new SearchException( "Unable to instantiate directory provider: " + className, e );
+ }
+ try {
+ provider.initialize( directoryProviderName, indexProps, searchFactoryImplementor );
+ }
+ catch (Exception e) {
+ throw new SearchException( "Unable to initialize: " + directoryProviderName, e );
+ }
+ int index = providers.indexOf( provider );
+ if ( index != -1 ) {
+ //share the same Directory provider for the same underlying store
+ final DirectoryProvider<?> directoryProvider = providers.get( index );
+ searchFactoryImplementor.addClassToDirectoryProvider( entity, directoryProvider );
+ return directoryProvider;
+ }
+ else {
+ configureOptimizerStrategy( searchFactoryImplementor, indexProps, provider );
+ configureIndexingParameters( searchFactoryImplementor, indexProps, provider );
+ providers.add( provider );
+ searchFactoryImplementor.addClassToDirectoryProvider( entity, provider );
+ if ( !searchFactoryImplementor.getDirectoryProviders().contains( provider ) ) {
+ searchFactoryImplementor.addDirectoryProvider( provider );
+ }
+ return provider;
+ }
+ }
- private void configureOptimizerStrategy(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider)
- {
- boolean incremental = indexProps.containsKey("optimizer.operation_limit.max")
- || indexProps.containsKey("optimizer.transaction_limit.max");
- OptimizerStrategy optimizerStrategy;
- if (incremental)
- {
- optimizerStrategy = new IncrementalOptimizerStrategy();
- optimizerStrategy.initialize(provider, indexProps, searchFactoryImplementor);
- }
- else
- {
- optimizerStrategy = new NoOpOptimizerStrategy();
- }
- searchFactoryImplementor.addOptimizerStrategy(provider, optimizerStrategy);
- }
+ private void configureOptimizerStrategy(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider) {
+ boolean incremental = indexProps.containsKey( "optimizer.operation_limit.max" )
+ || indexProps.containsKey( "optimizer.transaction_limit.max" );
+ OptimizerStrategy optimizerStrategy;
+ if ( incremental ) {
+ optimizerStrategy = new IncrementalOptimizerStrategy();
+ optimizerStrategy.initialize( provider, indexProps, searchFactoryImplementor );
+ }
+ else {
+ optimizerStrategy = new NoOpOptimizerStrategy();
+ }
+ searchFactoryImplementor.addOptimizerStrategy( provider, optimizerStrategy );
+ }
- /**
- * Creates a new <code>LuceneIndexingParameters</code> instance for the specified provider.
- * If there are no matching properties in the configuration default values will be applied.
- * <p>
- * NOTE:</br>
- * If a non batch value is set in the configuration apply it also to the
- * batch mode. This covers the case where users only specify
- * parameters for the non batch mode. In this case the same parameters apply for
- * batch indexing. Parameters are found "depth-first": if a batch parameter is set
- * in a global scope it will take priority on local transaction parameters.
- * </p>
- *
- * @param searchFactoryImplementor the search factory.
- * @param directoryProperties The properties extracted from the configuration.
- * @param provider The directory provider for which to configure the indexing parameters.
- */
- private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor,
- Properties directoryProperties, DirectoryProvider<?> provider)
- {
- LuceneIndexingParameters indexingParams = new LuceneIndexingParameters(directoryProperties);
- searchFactoryImplementor.addIndexingParameters(provider, indexingParams);
- }
+ /**
+ * Creates a new <code>LuceneIndexingParameters</code> instance for the specified provider.
+ * If there are no matching properties in the configuration default values will be applied.
+ * <p>
+	 * NOTE:<br/>
+ * If a non batch value is set in the configuration apply it also to the
+ * batch mode. This covers the case where users only specify
+ * parameters for the non batch mode. In this case the same parameters apply for
+ * batch indexing. Parameters are found "depth-first": if a batch parameter is set
+ * in a global scope it will take priority on local transaction parameters.
+ * </p>
+ *
+ * @param searchFactoryImplementor the search factory.
+ * @param directoryProperties The properties extracted from the configuration.
+ * @param provider The directory provider for which to configure the indexing parameters.
+ */
+ private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor,
+ Properties directoryProperties, DirectoryProvider<?> provider) {
+ LuceneIndexingParameters indexingParams = new LuceneIndexingParameters( directoryProperties );
+ searchFactoryImplementor.addIndexingParameters( provider, indexingParams );
+ }
- /**
- * Returns an array of directory properties
- * Properties are defaulted. For a given property name,
- * hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search.default
- * If the Index is not sharded, a single Properties is returned
- * If the index is sharded, the Properties index matches the shard index
- */
- private static Properties[] getDirectoryProperties(SearchConfiguration cfg, String directoryProviderName)
- {
- Properties rootCfg = new MaskedProperty(cfg.getProperties(), "hibernate.search");
- Properties globalProperties = new MaskedProperty(rootCfg, "default");
- Properties directoryLocalProperties = new MaskedProperty(rootCfg, directoryProviderName, globalProperties);
- final String shardsCountValue = directoryLocalProperties.getProperty(NBR_OF_SHARDS);
- if (shardsCountValue == null)
- {
- // no shards: finished.
- return new Properties[]{directoryLocalProperties};
- }
- else
- {
- // count shards
- int shardsCount = ConfigurationParseHelper.parseInt(shardsCountValue, shardsCountValue + " is not a number");
- // create shard-specific Props
- Properties[] shardLocalProperties = new Properties[shardsCount];
- for (int i = 0; i < shardsCount; i++)
- {
- shardLocalProperties[i] = new MaskedProperty(
- directoryLocalProperties, Integer.toString(i), directoryLocalProperties);
- }
- return shardLocalProperties;
- }
- }
+ /**
+ * Returns an array of directory properties
+ * Properties are defaulted. For a given property name,
+ * hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search.default
+ * If the Index is not sharded, a single Properties is returned
+ * If the index is sharded, the Properties index matches the shard index
+ */
+ private static Properties[] getDirectoryProperties(SearchConfiguration cfg, String directoryProviderName) {
+ Properties rootCfg = new MaskedProperty( cfg.getProperties(), "hibernate.search" );
+ Properties globalProperties = new MaskedProperty( rootCfg, "default" );
+ Properties directoryLocalProperties = new MaskedProperty( rootCfg, directoryProviderName, globalProperties );
+ final String shardsCountValue = directoryLocalProperties.getProperty( NBR_OF_SHARDS );
+ if ( shardsCountValue == null ) {
+ // no shards: finished.
+ return new Properties[] { directoryLocalProperties };
+ }
+ else {
+ // count shards
+ int shardsCount = ConfigurationParseHelper.parseInt( shardsCountValue, shardsCountValue + " is not a number" );
+ // create shard-specific Props
+ Properties[] shardLocalProperties = new Properties[shardsCount];
+ for (int i = 0; i < shardsCount; i++) {
+ shardLocalProperties[i] = new MaskedProperty(
+ directoryLocalProperties, Integer.toString( i ), directoryLocalProperties );
+ }
+ return shardLocalProperties;
+ }
+ }
- private static String getDirectoryProviderName(XClass clazz, SearchConfiguration cfg)
- {
- //yuk
- ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
- //get the most specialized (ie subclass > superclass) non default index name
- //if none extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
- //FIXME I'm inclined to get rid of the default value
- Class aClass = cfg.getClassMapping(clazz.getName());
- XClass rootIndex = null;
- do
- {
- XClass currentClazz = reflectionManager.toXClass(aClass);
- Indexed indexAnn = currentClazz.getAnnotation(Indexed.class);
- if (indexAnn != null)
- {
- if (indexAnn.index().length() != 0)
- {
- return indexAnn.index();
- }
- else
- {
- rootIndex = currentClazz;
- }
- }
- aClass = aClass.getSuperclass();
- }
- while (aClass != null);
- //there is nobody out there with a non default @Indexed.index
- if (rootIndex != null)
- {
- return rootIndex.getName();
- }
- else
- {
- throw new SearchException(
- "Trying to extract the index name from a non @Indexed class: " + clazz.getName());
- }
- }
+ private static String getDirectoryProviderName(XClass clazz, SearchConfiguration cfg) {
+ ReflectionManager reflectionManager = cfg.getReflectionManager();
+ if ( reflectionManager == null ) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ //get the most specialized (ie subclass > superclass) non default index name
+ //if none extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
+ //FIXME I'm inclined to get rid of the default value
+ Class aClass = cfg.getClassMapping( clazz.getName() );
+ XClass rootIndex = null;
+ do {
+ XClass currentClazz = reflectionManager.toXClass( aClass );
+ Indexed indexAnn = currentClazz.getAnnotation( Indexed.class );
+ if ( indexAnn != null ) {
+ if ( indexAnn.index().length() != 0 ) {
+ return indexAnn.index();
+ }
+ else {
+ rootIndex = currentClazz;
+ }
+ }
+ aClass = aClass.getSuperclass();
+ }
+ while ( aClass != null );
+ //there is nobody out there with a non default @Indexed.index
+ if ( rootIndex != null ) {
+ return rootIndex.getName();
+ }
+ else {
+ throw new SearchException(
+ "Trying to extract the index name from a non @Indexed class: " + clazz.getName() );
+ }
+ }
- public static class DirectoryProviders
- {
- private final IndexShardingStrategy shardingStrategy;
- private final DirectoryProvider[] providers;
+ public static class DirectoryProviders {
+ private final IndexShardingStrategy shardingStrategy;
+ private final DirectoryProvider[] providers;
- public DirectoryProviders(IndexShardingStrategy shardingStrategy, DirectoryProvider[] providers)
- {
- this.shardingStrategy = shardingStrategy;
- this.providers = providers;
- }
+ public DirectoryProviders(IndexShardingStrategy shardingStrategy, DirectoryProvider[] providers) {
+ this.shardingStrategy = shardingStrategy;
+ this.providers = providers;
+ }
- public IndexShardingStrategy getSelectionStrategy()
- {
- return shardingStrategy;
- }
+ public IndexShardingStrategy getSelectionStrategy() {
+ return shardingStrategy;
+ }
- public DirectoryProvider[] getProviders()
- {
- return providers;
+ public DirectoryProvider[] getProviders() {
+ return providers;
}
}
Modified: search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -1,38 +1,34 @@
package org.hibernate.search.transaction;
import java.io.Serializable;
+import javax.transaction.Synchronization;
import org.hibernate.Transaction;
import org.hibernate.event.EventSource;
-import javax.transaction.Synchronization;
-
/**
- at author Navin Surtani - navin at surtani.org
+ * Implementation of the transactional context on top of an EventSource (Session)
+ *
+ * @author Navin Surtani - navin at surtani.org
*/
-public class EventSourceTransactionContext implements TransactionContext, Serializable
-{
- EventSource eventSource;
+public class EventSourceTransactionContext implements TransactionContext, Serializable {
+ EventSource eventSource;
- public EventSourceTransactionContext(EventSource eventSource)
- {
- this.eventSource = eventSource;
- }
+ public EventSourceTransactionContext(EventSource eventSource) {
+ this.eventSource = eventSource;
+ }
- public Object getTransactionIdentifier()
- {
- return eventSource.getTransaction();
- }
+ public Object getTransactionIdentifier() {
+ return eventSource.getTransaction();
+ }
- public void registerSynchronization(Synchronization synchronization)
- {
- Transaction transaction = eventSource.getTransaction();
- transaction.registerSynchronization(synchronization);
- }
+ public void registerSynchronization(Synchronization synchronization) {
+ Transaction transaction = eventSource.getTransaction();
+ transaction.registerSynchronization( synchronization );
+ }
- public boolean isTxInProgress()
- {
- return eventSource.isTransactionInProgress();
- }
+ public boolean isTransactionInProgress() {
+ return eventSource.isTransactionInProgress();
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java 2008-07-16 13:03:30 UTC (rev 14934)
+++ search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java 2008-07-16 14:37:47 UTC (rev 14935)
@@ -1,23 +1,28 @@
package org.hibernate.search.transaction;
+import java.io.Serializable;
import javax.transaction.Synchronization;
-import java.io.Serializable;
/**
- at author Navin Surtani - navin at surtani.org
+ * Contract needed by Hibernate Search to batch changes per transaction
+ *
+ * @author Navin Surtani - navin at surtani.org
*/
-public interface TransactionContext extends Serializable
-{
- /**
- *@return A boolean whether a transaction is in progress or not.
- */
- public boolean isTxInProgress();
+public interface TransactionContext {
+ /**
+ * @return A boolean whether a transaction is in progress or not.
+ */
+ public boolean isTransactionInProgress();
- /**
- *
- * @return a transaction object.
- */
- public Object getTransactionIdentifier();
+ /**
+ * @return a transaction object.
+ */
+ public Object getTransactionIdentifier();
- public void registerSynchronization(Synchronization synchronization);
+ /**
+	 * register the given synchronization
+ *
+ * @param synchronization synchronization to register
+ */
+ public void registerSynchronization(Synchronization synchronization);
}
More information about the hibernate-commits
mailing list