[hibernate-commits] Hibernate SVN: r14928 - in search/trunk/src/java/org/hibernate/search: annotations and 9 other directories.
hibernate-commits at lists.jboss.org
hibernate-commits at lists.jboss.org
Tue Jul 15 04:47:20 EDT 2008
Author: navssurtani
Date: 2008-07-15 04:47:20 -0400 (Tue, 15 Jul 2008)
New Revision: 14928
Added:
search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java
search/trunk/src/java/org/hibernate/search/cfg/
search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java
search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
search/trunk/src/java/org/hibernate/search/transaction/
search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java
search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java
Modified:
search/trunk/src/java/org/hibernate/search/backend/Worker.java
search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java
search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
search/trunk/src/java/org/hibernate/search/impl/InitContext.java
search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
search/trunk/src/java/org/hibernate/search/reader/ReaderProviderFactory.java
search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
Log:
Wrote up and implemented changes for the JBoss Cache integration project.
Added: search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/annotations/ProvidedId.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -0,0 +1,22 @@
+package org.hibernate.search.annotations;
+
+import org.hibernate.search.bridge.StringBridge;
+
+import java.lang.annotation.*;
+
+/**
+ * This annotation means that document ids will be generated externally and do not need to be
+ * contained within the class being indexed.
+ * <p />
+ * Basically, this means that classes annotated with this will NOT be scanned for {@link org.hibernate.search.annotations.DocumentId} annotated fields.
+ * @author Navin Surtani - navin at surtani.org
+ */
+ at Retention( RetentionPolicy.RUNTIME )
+ at Target( ElementType.TYPE )
+ at Documented
+public @interface ProvidedId
+{
+
+ String name() default "ProvidedId";
+ Class<StringBridge> bridge() default StringBridge.class;
+}
Modified: search/trunk/src/java/org/hibernate/search/backend/Worker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Worker.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/backend/Worker.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -5,6 +5,7 @@
import org.hibernate.event.EventSource;
import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.transaction.TransactionContext;
/**
* Perform work for a given session. This implementation has to be multi threaded
@@ -13,7 +14,7 @@
public interface Worker {
//Use of EventSource since it's the common subinterface for Session and SessionImplementor
//the alternative would have been to do a subcasting or to retrieve 2 parameters :(
- void performWork(Work work, EventSource session);
+ void performWork(Work work, TransactionContext transactionContext);
void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor);
@@ -26,5 +27,5 @@
/**
* flush any work queue
*/
- void flushWorks(EventSource session);
+ void flushWorks(TransactionContext transactionContext);
}
Modified: search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/backend/WorkerFactory.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -4,9 +4,9 @@
import java.util.Map;
import java.util.Properties;
-import org.hibernate.cfg.Configuration;
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.backend.impl.TransactionalWorker;
import org.hibernate.util.ReflectHelper;
@@ -15,47 +15,59 @@
/**
* @author Emmanuel Bernard
*/
-public abstract class WorkerFactory {
+public abstract class WorkerFactory
+{
- private static Properties getProperties(Configuration cfg) {
- Properties props = cfg.getProperties();
- Properties workerProperties = new Properties();
- for ( Map.Entry entry : props.entrySet() ) {
- String key = (String) entry.getKey();
- if ( key.startsWith( Environment.WORKER_PREFIX ) ) {
- //key.substring( Environment.WORKER_PREFIX.length() )
- workerProperties.setProperty( key, (String) entry.getValue() );
- }
- }
- return workerProperties;
- }
+ private static Properties getProperties(SearchConfiguration cfg)
+ {
+ Properties props = cfg.getProperties();
+ Properties workerProperties = new Properties();
+ for (Map.Entry entry : props.entrySet())
+ {
+ String key = (String) entry.getKey();
+ if (key.startsWith(Environment.WORKER_PREFIX))
+ {
+ //key.substring( Environment.WORKER_PREFIX.length() )
+ workerProperties.setProperty(key, (String) entry.getValue());
+ }
+ }
+ return workerProperties;
+ }
- public static Worker createWorker(Configuration cfg, SearchFactoryImplementor searchFactoryImplementor) {
- Properties props = getProperties( cfg );
- String impl = props.getProperty( Environment.WORKER_SCOPE );
- Worker worker;
- if ( StringHelper.isEmpty( impl ) ) {
- worker = new TransactionalWorker();
- }
- else if ( "transaction".equalsIgnoreCase( impl ) ) {
- worker = new TransactionalWorker();
- }
- else {
- try {
- Class workerClass = ReflectHelper.classForName( impl, WorkerFactory.class );
- worker = (Worker) workerClass.newInstance();
- }
- catch (ClassNotFoundException e) {
- throw new SearchException("Unable to find worker class: " + impl, e );
- }
- catch (IllegalAccessException e) {
- throw new SearchException("Unable to instanciate worker class: " + impl, e );
- }
- catch (InstantiationException e) {
- throw new SearchException("Unable to instanciate worker class: " + impl, e );
- }
- }
- worker.initialize( props, searchFactoryImplementor );
- return worker;
- }
+ public static Worker createWorker(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor)
+ {
+ Properties props = getProperties(cfg);
+ String impl = props.getProperty(Environment.WORKER_SCOPE);
+ Worker worker;
+ if (StringHelper.isEmpty(impl))
+ {
+ worker = new TransactionalWorker();
+ }
+ else if ("transaction".equalsIgnoreCase(impl))
+ {
+ worker = new TransactionalWorker();
+ }
+ else
+ {
+ try
+ {
+ Class workerClass = ReflectHelper.classForName(impl, WorkerFactory.class);
+ worker = (Worker) workerClass.newInstance();
+ }
+ catch (ClassNotFoundException e)
+ {
+ throw new SearchException("Unable to find worker class: " + impl, e);
+ }
+ catch (IllegalAccessException e)
+ {
+ throw new SearchException("Unable to instanciate worker class: " + impl, e);
+ }
+ catch (InstantiationException e)
+ {
+ throw new SearchException("Unable to instanciate worker class: " + impl, e);
+ }
+ }
+ worker.initialize(props, searchFactoryImplementor);
+ return worker;
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -1,62 +1,72 @@
//$Id$
package org.hibernate.search.backend.impl;
-import java.util.Properties;
-
-import org.hibernate.Transaction;
-import org.hibernate.event.EventSource;
import org.hibernate.search.backend.QueueingProcessor;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
import org.hibernate.search.backend.Worker;
import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.transaction.TransactionContext;
import org.hibernate.search.util.WeakIdentityHashMap;
+import javax.transaction.Transaction;
+import java.util.Properties;
+
/**
* Queue works per transaction.
* If out of transaction, the work is executed right away
- *
+ * <p/>
* When <code>hibernate.search.worker.type</code> is set to <code>async</code>
- * the work is done in a separate thread (threads are pooled)
+ * the work is done in a separate thread (threads are pooled)
*
* @author Emmanuel Bernard
*/
-public class TransactionalWorker implements Worker {
- //not a synchronized map since for a given transaction, we have not concurrent access
- protected final WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
- private QueueingProcessor queueingProcessor;
+public class TransactionalWorker implements Worker
+{
+ //not a synchronized map since for a given transaction, we have no concurrent access
+ protected final WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
+ private QueueingProcessor queueingProcessor;
- public void performWork(Work work, EventSource session) {
- if ( session.isTransactionInProgress() ) {
- Transaction transaction = session.getTransaction();
- PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
- synchronizationPerTransaction.get( transaction );
- if ( txSync == null || txSync.isConsumed() ) {
- txSync = new PostTransactionWorkQueueSynchronization( queueingProcessor, synchronizationPerTransaction );
- transaction.registerSynchronization( txSync );
- synchronizationPerTransaction.put(transaction, txSync);
- }
- txSync.add( work );
- }
- else {
- WorkQueue queue = new WorkQueue(2); //one work can be split
- queueingProcessor.add( work, queue );
- queueingProcessor.prepareWorks( queue );
- queueingProcessor.performWorks( queue );
- }
- }
+ public void performWork(Work work, TransactionContext transactionContext)
+ {
+ if (transactionContext.isTxInProgress())
+ {
+ Object transaction = transactionContext.getTransactionIdentifier();
+ PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
+ synchronizationPerTransaction.get(transaction);
+ if (txSync == null || txSync.isConsumed())
+ {
+ txSync = new PostTransactionWorkQueueSynchronization(queueingProcessor, synchronizationPerTransaction);
+ transactionContext.registerSynchronization(txSync);
+ synchronizationPerTransaction.put(transaction, txSync);
+ }
+ txSync.add(work);
+ }
+ else
+ {
+ WorkQueue queue = new WorkQueue(2); //one work can be split
+ queueingProcessor.add(work, queue);
+ queueingProcessor.prepareWorks(queue);
+ queueingProcessor.performWorks(queue);
+ }
+ }
- public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
- this.queueingProcessor = new BatchedQueueingProcessor( searchFactory, props );
- }
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory)
+ {
+ this.queueingProcessor = new BatchedQueueingProcessor(searchFactory, props);
+ }
- public void close() {
- queueingProcessor.close();
- }
+ public void close()
+ {
+ queueingProcessor.close();
+ }
- public void flushWorks(EventSource session) {
- if ( session.isTransactionInProgress() ) {
- Transaction transaction = session.getTransaction();
+
+
+
+ public void flushWorks(TransactionContext transactionContext) {
+ if ( transactionContext.isTxInProgress() ) {
+ Object transaction = transactionContext.getTransactionIdentifier();
PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
synchronizationPerTransaction.get( transaction );
if ( txSync != null && ! txSync.isConsumed() ) {
Added: search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/cfg/SearchConfiguration.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -0,0 +1,57 @@
+package org.hibernate.search.cfg;
+
+import java.util.Iterator;
+import java.util.Properties;
+
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+
+/**
+ * Provides configuration to Hibernate Search
+ *
+ * @author Navin Surtani - navin at surtani.org
+ */
+public interface SearchConfiguration {
+ /**
+ * Returns an iterator over the list of indexed classes
+ *
+ * @return iterator of indexed classes.
+ */
+
+ Iterator<Class> getClassMappings();
+
+ /**
+ * Returns a {@link java.lang.Class} from a String parameter.
+ * TODO: should it be Iterable?
+ * @param name
+ * @return java.lang.Class
+ */
+
+ Class getClassMapping(String name);
+
+ /**
+ * Gets a configuration property from its name
+ * or null if not present
+ *
+ * @param propertyName - as a String.
+ * @return the property as a String
+ */
+
+ String getProperty(String propertyName);
+
+ /**
+ * Gets properties as a java.util.Properties object.
+ *
+ * @return a java.util.Properties object.
+ * @see java.util.Properties object
+ */
+ Properties getProperties();
+
+ /**
+ * Returns a reflection manager if already available in the environment,
+ * or null otherwise.
+ * @return ReflectionManager
+ */
+ ReflectionManager getReflectionManager();
+
+
+}
Added: search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -0,0 +1,92 @@
+package org.hibernate.search.cfg;
+
+import java.util.Properties;
+import java.util.Iterator;
+
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+
+/**
+ * Search configuration implementation wrapping a Hibernate Core configuration
+ *
+ * @author Emmanuel Bernard
+ */
+public class SearchConfigurationFromHibernateCore implements SearchConfiguration
+{
+ private org.hibernate.cfg.Configuration cfg;
+ private ReflectionManager reflectionManager;
+
+ public SearchConfigurationFromHibernateCore(org.hibernate.cfg.Configuration cfg)
+ {
+ if (cfg == null) throw new NullPointerException("Configuration is null");
+ this.cfg = cfg;
+ }
+
+ public Iterator<Class> getClassMappings()
+ {
+ return new ClassIterator(cfg.getClassMappings());
+ }
+
+ public Class getClassMapping(String name)
+ {
+ return cfg.getClassMapping(name).getMappedClass();
+ }
+
+ public String getProperty(String propertyName)
+ {
+ return cfg.getProperty(propertyName);
+ }
+
+ public Properties getProperties()
+ {
+ return cfg.getProperties();
+ }
+
+ public ReflectionManager getReflectionManager()
+ {
+ if (reflectionManager == null)
+ {
+ try
+ {
+ //TODO introduce a ReflectionManagerHolder interface to avoid reflection
+ //I want to avoid hard link between HAN and Validator for such a simple need
+ //reuse the existing reflectionManager one when possible
+ reflectionManager =
+ (ReflectionManager) cfg.getClass().getMethod("getReflectionManager").invoke(cfg);
+
+ }
+ catch (Exception e)
+ {
+ reflectionManager = new JavaReflectionManager();
+ }
+ }
+ return reflectionManager;
+ }
+
+ private class ClassIterator implements Iterator<Class>
+ {
+ private Iterator hibernatePersistentClassIterator;
+
+ private ClassIterator(Iterator hibernatePersistentClassIterator)
+ {
+ this.hibernatePersistentClassIterator = hibernatePersistentClassIterator;
+ }
+
+ public boolean hasNext()
+ {
+ return hibernatePersistentClassIterator.hasNext();
+ }
+
+ public Class next()
+ {
+ PersistentClass pc = (PersistentClass) hibernatePersistentClassIterator.next();
+ return pc.getMappedClass();
+ }
+
+ public void remove()
+ {
+ hibernatePersistentClassIterator.remove();
+ }
+ }
+}
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -27,17 +27,7 @@
import org.hibernate.annotations.common.util.StringHelper;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.search.SearchException;
-import org.hibernate.search.annotations.AnalyzerDef;
-import org.hibernate.search.annotations.AnalyzerDefs;
-import org.hibernate.search.annotations.Boost;
-import org.hibernate.search.annotations.ClassBridge;
-import org.hibernate.search.annotations.ClassBridges;
-import org.hibernate.search.annotations.ContainedIn;
-import org.hibernate.search.annotations.DocumentId;
-import org.hibernate.search.annotations.Index;
-import org.hibernate.search.annotations.IndexedEmbedded;
-import org.hibernate.search.annotations.Store;
-import org.hibernate.search.annotations.TermVector;
+import org.hibernate.search.annotations.*;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.DeleteLuceneWork;
import org.hibernate.search.backend.LuceneWork;
@@ -48,6 +38,7 @@
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.TwoWayFieldBridge;
import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
+import org.hibernate.search.bridge.builtin.StringBridge;
import org.hibernate.search.impl.InitContext;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.IndexShardingStrategy;
@@ -66,796 +57,958 @@
* @author Hardy Ferentschik
*/
@SuppressWarnings("unchecked")
-public class DocumentBuilder<T> {
- private static final Logger log = LoggerFactory.getLogger( DocumentBuilder.class );
+public class DocumentBuilder<T>
+{
+ private static final Logger log = LoggerFactory.getLogger(DocumentBuilder.class);
- private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
- private final XClass beanClass;
- private final DirectoryProvider[] directoryProviders;
- private final IndexShardingStrategy shardingStrategy;
- private String idKeywordName;
- private XMember idGetter;
- private Float idBoost;
- public static final String CLASS_FIELDNAME = "_hibernate_class";
- private TwoWayFieldBridge idBridge;
- private Set<Class> mappedSubclasses = new HashSet<Class>();
- private ReflectionManager reflectionManager;
- private int level = 0;
- private int maxLevel = Integer.MAX_VALUE;
- private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
- private Similarity similarity;
- private boolean isRoot;
- //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- private boolean safeFromTupleId;
+ private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
+ private final XClass beanClass;
+ private final DirectoryProvider[] directoryProviders;
+ private final IndexShardingStrategy shardingStrategy;
+ private String idKeywordName;
+ private XMember idGetter;
+ private Float idBoost;
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+ private TwoWayFieldBridge idBridge;
+ private Set<Class> mappedSubclasses = new HashSet<Class>();
+ private ReflectionManager reflectionManager;
+ private int level = 0;
+ private int maxLevel = Integer.MAX_VALUE;
+ private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
+ private Similarity similarity;
+ private boolean isRoot;
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most databases
+ private boolean safeFromTupleId;
+ private boolean idProvided = false;
- public boolean isRoot() {
- return isRoot;
- }
- public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
- IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
- this.beanClass = clazz;
- this.directoryProviders = directoryProviders;
- this.shardingStrategy = shardingStrategy;
- //FIXME get rid of it when boost is stored?
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
+ public boolean isRoot()
+ {
+ return isRoot;
+ }
- if ( clazz == null ) throw new AssertionFailure( "Unable to build a DocumentBuilder with a null class" );
- rootPropertiesMetadata.boost = getBoost( clazz );
- rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
- Set<XClass> processedClasses = new HashSet<XClass>();
- processedClasses.add( clazz );
- initializeMembers( clazz, rootPropertiesMetadata, true, "", processedClasses, context );
- //processedClasses.remove( clazz ); for the sake of completness
- this.analyzer.setGlobalAnalyzer( rootPropertiesMetadata.analyzer );
- if ( idKeywordName == null ) {
- throw new SearchException( "No document id in: " + clazz.getName() );
- }
- //if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails on most database
- //a TwoWayString2FieldBridgeAdaptor is never a composite id
- safeFromTupleId = TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom( idBridge.getClass() );
- }
+ public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
+ IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager)
+ {
+ this.beanClass = clazz;
+ this.directoryProviders = directoryProviders;
+ this.shardingStrategy = shardingStrategy;
+ //FIXME get rid of it when boost is stored?
+ this.reflectionManager = reflectionManager;
+ this.similarity = context.getDefaultSimilarity();
- private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
- org.hibernate.search.annotations.Analyzer analyzerAnn =
- annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
- return getAnalyzer( analyzerAnn, context );
- }
+ if (clazz == null) throw new AssertionFailure("Unable to build a DocumentBuilder with a null class");
+ rootPropertiesMetadata.boost = getBoost(clazz);
+ rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
+ Set<XClass> processedClasses = new HashSet<XClass>();
+ processedClasses.add(clazz);
+ initializeMembers(clazz, rootPropertiesMetadata, true, "", processedClasses, context);
+ //processedClasses.remove( clazz ); for the sake of completeness
+ this.analyzer.setGlobalAnalyzer(rootPropertiesMetadata.analyzer);
+ if (idKeywordName == null)
+ {
+ //check if there is a ProvidedId first.
- private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context) {
- Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
- if ( analyzerClass == void.class ) {
- String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
- if ( StringHelper.isEmpty( definition ) ) {
- return null;
- }
- else {
+ ProvidedId provided = clazz.getAnnotation(org.hibernate.search.annotations.ProvidedId.class);
+ if (provided == null) throw new SearchException("No document id in: " + clazz.getName());
+ idBridge = getProvidedIdBridge();
+ idKeywordName = provided.name();
- return context.buildLazyAnalyzer( definition );
- }
- }
- else {
- try {
- return (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new SearchException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
- );
- }
- catch (Exception e) {
- throw new SearchException( "Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e );
- }
- }
- }
- private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
- Set<XClass> processedClasses, InitContext context) {
- List<XClass> hierarchy = new ArrayList<XClass>();
- for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass()) {
- hierarchy.add( currClass );
- }
- Class similarityClass = null;
- for (int index = hierarchy.size() - 1; index >= 0; index--) {
- XClass currClass = hierarchy.get( index );
- /**
- * Override the default analyzer for the properties if the class hold one
- * That's the reason we go down the hierarchy
- */
- Analyzer analyzer = getAnalyzer( currClass, context );
+ }
+ //if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails on most database
+ //a TwoWayString2FieldBridgeAdaptor is never a composite id
+ safeFromTupleId = TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom(idBridge.getClass());
+ }
- if ( analyzer != null ) {
- propertiesMetadata.analyzer = analyzer;
- }
- getAnalyzerDefs(currClass, context);
- // Check for any ClassBridges annotation.
- ClassBridges classBridgesAnn = currClass.getAnnotation( ClassBridges.class );
- if ( classBridgesAnn != null ) {
- ClassBridge[] cbs = classBridgesAnn.value();
- for (ClassBridge cb : cbs) {
- bindClassAnnotation( prefix, propertiesMetadata, cb, context );
- }
- }
+ private TwoWayFieldBridge getProvidedIdBridge()
+ {
+ return new TwoWayString2FieldBridgeAdaptor(new StringBridge());
+ }
- // Check for any ClassBridge style of annotations.
- ClassBridge classBridgeAnn = currClass.getAnnotation( ClassBridge.class );
- if ( classBridgeAnn != null ) {
- bindClassAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
- }
- //Get similarity
- //TODO: similarity form @IndexedEmbedded are not taken care of. Exception??
- if ( isRoot ) {
- org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation( org.hibernate.search.annotations.Similarity.class );
- if ( similarityAnn != null ) {
- if ( similarityClass != null ) {
- throw new SearchException( "Multiple Similarities defined in the same class hierarchy: " + beanClass.getName() );
- }
- similarityClass = similarityAnn.impl();
- }
- }
- //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
- // so indexing a non property does not make sense
- List<XProperty> methods = currClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
- for (XProperty method : methods) {
- initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses, context );
- }
+ private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context)
+ {
+ org.hibernate.search.annotations.Analyzer analyzerAnn =
+ annotatedElement.getAnnotation(org.hibernate.search.annotations.Analyzer.class);
+ return getAnalyzer(analyzerAnn, context);
+ }
- List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD );
- for (XProperty field : fields) {
- initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses, context );
- }
- }
- if ( isRoot && similarityClass != null ) {
- try {
- similarity = (Similarity) similarityClass.newInstance();
- }
- catch (Exception e) {
- log.error( "Exception attempting to instantiate Similarity '{}' set for {}",
- similarityClass.getName(), beanClass.getName() );
- }
- }
- }
+ private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context)
+ {
+ Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
+ if (analyzerClass == void.class)
+ {
+ String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
+ if (StringHelper.isEmpty(definition))
+ {
+ return null;
+ }
+ else
+ {
- private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context) {
- AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
- if ( defs != null ) {
- for (AnalyzerDef def : defs.value()) {
- context.addAnalyzerDef( def );
- }
- }
- AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
- context.addAnalyzerDef( def );
- }
+ return context.buildLazyAnalyzer(definition);
+ }
+ }
+ else
+ {
+ try
+ {
+ return (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e)
+ {
+ throw new SearchException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(), e
+ );
+ }
+ catch (Exception e)
+ {
+ throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e);
+ }
+ }
+ }
- public String getIdentifierName() {
- return idGetter.getName();
- }
+ private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
+ Set<XClass> processedClasses, InitContext context)
+ {
+ List<XClass> hierarchy = new ArrayList<XClass>();
+ for (XClass currClass = clazz; currClass != null; currClass = currClass.getSuperclass())
+ {
+ hierarchy.add(currClass);
+ }
+ Class similarityClass = null;
+ for (int index = hierarchy.size() - 1; index >= 0; index--)
+ {
+ XClass currClass = hierarchy.get(index);
+ /**
+ * Override the default analyzer for the properties if the class holds one
+ * That's the reason we go down the hierarchy
+ */
+ Analyzer analyzer = getAnalyzer(currClass, context);
- public Similarity getSimilarity() {
- return similarity;
- }
+ if (analyzer != null)
+ {
+ propertiesMetadata.analyzer = analyzer;
+ }
+ getAnalyzerDefs(currClass, context);
+ // Check for any ClassBridges annotation.
+ ClassBridges classBridgesAnn = currClass.getAnnotation(ClassBridges.class);
+ if (classBridgesAnn != null)
+ {
+ ClassBridge[] cbs = classBridgesAnn.value();
+ for (ClassBridge cb : cbs)
+ {
+ bindClassAnnotation(prefix, propertiesMetadata, cb, context);
+ }
+ }
- private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
- String prefix, Set<XClass> processedClasses, InitContext context) {
+ // Check for any ClassBridge style of annotations.
+ ClassBridge classBridgeAnn = currClass.getAnnotation(ClassBridge.class);
+ if (classBridgeAnn != null)
+ {
+ bindClassAnnotation(prefix, propertiesMetadata, classBridgeAnn, context);
+ }
- DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
- if ( documentIdAnn != null ) {
- if ( isRoot ) {
- if ( idKeywordName != null ) {
- throw new AssertionFailure( "Two document id assigned: "
- + idKeywordName + " and " + BinderHelper.getAttributeName( member, documentIdAnn.name() ) );
- }
- idKeywordName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
- FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager );
- if ( fieldBridge instanceof TwoWayFieldBridge ) {
- idBridge = (TwoWayFieldBridge) fieldBridge;
- }
- else {
- throw new SearchException(
- "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName() );
- }
- idBoost = getBoost( member );
- setAccessible( member );
- idGetter = member;
- }
- else {
- //component should index their document id
- setAccessible( member );
- propertiesMetadata.fieldGetters.add( member );
- String fieldName = prefix + BinderHelper.getAttributeName( member, documentIdAnn.name() );
- propertiesMetadata.fieldNames.add( fieldName );
- propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
- propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
- propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
- propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member, reflectionManager ) );
- // property > entity analyzer (no field analyzer)
- Analyzer analyzer = getAnalyzer( member, context );
- if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
- if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
- }
- {
- org.hibernate.search.annotations.Field fieldAnn =
- member.getAnnotation( org.hibernate.search.annotations.Field.class );
- if ( fieldAnn != null ) {
- bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
- }
- }
- {
- org.hibernate.search.annotations.Fields fieldsAnn =
- member.getAnnotation( org.hibernate.search.annotations.Fields.class );
- if ( fieldsAnn != null ) {
- for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value()) {
- bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
- }
- }
- }
- getAnalyzerDefs( member, context );
+ //Get similarity
+ //TODO: similarity from @IndexedEmbedded is not taken care of. Exception??
+ if (isRoot)
+ {
+ org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation(org.hibernate.search.annotations.Similarity.class);
+ if (similarityAnn != null)
+ {
+ if (similarityClass != null)
+ {
+ throw new SearchException("Multiple Similarities defined in the same class hierarchy: " + beanClass.getName());
+ }
+ similarityClass = similarityAnn.impl();
+ }
+ }
- IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
- if ( embeddedAnn != null ) {
- int oldMaxLevel = maxLevel;
- int potentialLevel = embeddedAnn.depth() + level;
- if ( potentialLevel < 0 ) {
- potentialLevel = Integer.MAX_VALUE;
- }
- maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
- level++;
+ //rejecting non properties (ie regular methods) because the object is loaded from Hibernate,
+ // so indexing a non property does not make sense
+ List<XProperty> methods = currClass.getDeclaredProperties(XClass.ACCESS_PROPERTY);
+ for (XProperty method : methods)
+ {
+ initializeMember(method, propertiesMetadata, isRoot, prefix, processedClasses, context);
+ }
- XClass elementClass;
- if ( void.class == embeddedAnn.targetElement() ) {
- elementClass = member.getElementClass();
- }
- else {
- elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
- }
- if ( maxLevel == Integer.MAX_VALUE //infinite
- && processedClasses.contains( elementClass ) ) {
- throw new SearchException(
- "Circular reference. Duplicate use of "
- + elementClass.getName()
- + " in root entity " + beanClass.getName()
- + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
- );
- }
- if ( level <= maxLevel ) {
- processedClasses.add( elementClass ); //push
+ List<XProperty> fields = currClass.getDeclaredProperties(XClass.ACCESS_FIELD);
+ for (XProperty field : fields)
+ {
+ initializeMember(field, propertiesMetadata, isRoot, prefix, processedClasses, context);
+ }
+ }
+ if (isRoot && similarityClass != null)
+ {
+ try
+ {
+ similarity = (Similarity) similarityClass.newInstance();
+ }
+ catch (Exception e)
+ {
+ log.error("Exception attempting to instantiate Similarity '{}' set for {}",
+ similarityClass.getName(), beanClass.getName());
+ }
+ }
+ }
- setAccessible( member );
- propertiesMetadata.embeddedGetters.add( member );
- PropertiesMetadata metadata = new PropertiesMetadata();
- propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
- metadata.boost = getBoost( member );
- //property > entity analyzer
- Analyzer analyzer = getAnalyzer( member, context );
- metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
- String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
- initializeMembers( elementClass, metadata, false, localPrefix, processedClasses, context );
- /**
- * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
- */
- if ( member.isArray() ) {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
- }
- else if ( member.isCollection() ) {
- if ( Map.class.equals( member.getCollectionClass() ) ) {
- //hum subclasses etc etc??
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
- }
- else {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION );
- }
- }
- else {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
- }
+ private void getAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context)
+ {
+ AnalyzerDefs defs = annotatedElement.getAnnotation(AnalyzerDefs.class);
+ if (defs != null)
+ {
+ for (AnalyzerDef def : defs.value())
+ {
+ context.addAnalyzerDef(def);
+ }
+ }
+ AnalyzerDef def = annotatedElement.getAnnotation(AnalyzerDef.class);
+ context.addAnalyzerDef(def);
+ }
- processedClasses.remove( elementClass ); //pop
- }
- else if ( log.isTraceEnabled() ) {
- String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
- log.trace( "depth reached, ignoring {}", localPrefix );
- }
+ public String getIdentifierName()
+ {
+ return idGetter.getName();
+ }
- level--;
- maxLevel = oldMaxLevel; //set back the the old max level
- }
+ public Similarity getSimilarity()
+ {
+ return similarity;
+ }
- ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
- if ( containedAnn != null ) {
- setAccessible( member );
- propertiesMetadata.containedInGetters.add( member );
- }
- }
+ private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
+ String prefix, Set<XClass> processedClasses, InitContext context)
+ {
- private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context) {
- //FIXME name should be prefixed
- String fieldName = prefix + ann.name();
- propertiesMetadata.classNames.add( fieldName );
- propertiesMetadata.classStores.add( getStore( ann.store() ) );
- propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
- propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
- propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
- propertiesMetadata.classBoosts.add( ann.boost().value() );
+ DocumentId documentIdAnn = member.getAnnotation(DocumentId.class);
+ if (documentIdAnn != null)
+ {
+ if (isRoot)
+ {
+ if (idKeywordName != null)
+ {
+ throw new AssertionFailure("Two document id assigned: "
+ + idKeywordName + " and " + BinderHelper.getAttributeName(member, documentIdAnn.name()));
+ }
+ idKeywordName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
+ FieldBridge fieldBridge = BridgeFactory.guessType(null, member, reflectionManager);
+ if (fieldBridge instanceof TwoWayFieldBridge)
+ {
+ idBridge = (TwoWayFieldBridge) fieldBridge;
+ }
+ else
+ {
+ throw new SearchException(
+ "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName());
+ }
+ idBoost = getBoost(member);
+ setAccessible(member);
+ idGetter = member;
+ }
+ else
+ {
+ //component should index their document id
+ setAccessible(member);
+ propertiesMetadata.fieldGetters.add(member);
+ String fieldName = prefix + BinderHelper.getAttributeName(member, documentIdAnn.name());
+ propertiesMetadata.fieldNames.add(fieldName);
+ propertiesMetadata.fieldStore.add(getStore(Store.YES));
+ propertiesMetadata.fieldIndex.add(getIndex(Index.UN_TOKENIZED));
+ propertiesMetadata.fieldTermVectors.add(getTermVector(TermVector.NO));
+ propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(null, member, reflectionManager));
+ // property > entity analyzer (no field analyzer)
+ Analyzer analyzer = getAnalyzer(member, context);
+ if (analyzer == null) analyzer = propertiesMetadata.analyzer;
+ if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
+ this.analyzer.addScopedAnalyzer(fieldName, analyzer);
+ }
+ }
+ {
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation(org.hibernate.search.annotations.Field.class);
+ if (fieldAnn != null)
+ {
+ bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
+ }
+ }
+ {
+ org.hibernate.search.annotations.Fields fieldsAnn =
+ member.getAnnotation(org.hibernate.search.annotations.Fields.class);
+ if (fieldsAnn != null)
+ {
+ for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value())
+ {
+ bindFieldAnnotation(member, propertiesMetadata, prefix, fieldAnn, context);
+ }
+ }
+ }
+ getAnalyzerDefs(member, context);
- Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
- if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
- if ( analyzer == null ) throw new AssertionFailure( "Analyzer should not be undefined" );
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
+ IndexedEmbedded embeddedAnn = member.getAnnotation(IndexedEmbedded.class);
+ if (embeddedAnn != null)
+ {
+ int oldMaxLevel = maxLevel;
+ int potentialLevel = embeddedAnn.depth() + level;
+ if (potentialLevel < 0)
+ {
+ potentialLevel = Integer.MAX_VALUE;
+ }
+ maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
+ level++;
- private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context) {
- setAccessible( member );
- propertiesMetadata.fieldGetters.add( member );
- String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
- propertiesMetadata.fieldNames.add( fieldName );
- propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
- propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
- propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
- propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member, reflectionManager ) );
+ XClass elementClass;
+ if (void.class == embeddedAnn.targetElement())
+ {
+ elementClass = member.getElementClass();
+ }
+ else
+ {
+ elementClass = reflectionManager.toXClass(embeddedAnn.targetElement());
+ }
+ if (maxLevel == Integer.MAX_VALUE //infinite
+ && processedClasses.contains(elementClass))
+ {
+ throw new SearchException(
+ "Circular reference. Duplicate use of "
+ + elementClass.getName()
+ + " in root entity " + beanClass.getName()
+ + "#" + buildEmbeddedPrefix(prefix, embeddedAnn, member)
+ );
+ }
+ if (level <= maxLevel)
+ {
+ processedClasses.add(elementClass); //push
- // Field > property > entity analyzer
- Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
- if ( analyzer == null ) analyzer = getAnalyzer( member, context );
- if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
- if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be undefined" );
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
+ setAccessible(member);
+ propertiesMetadata.embeddedGetters.add(member);
+ PropertiesMetadata metadata = new PropertiesMetadata();
+ propertiesMetadata.embeddedPropertiesMetadata.add(metadata);
+ metadata.boost = getBoost(member);
+ //property > entity analyzer
+ Analyzer analyzer = getAnalyzer(member, context);
+ metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
+ String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
+ initializeMembers(elementClass, metadata, false, localPrefix, processedClasses, context);
+ /**
+ * We will only index the "expected" type but that's OK, HQL cannot do downcasting either
+ */
+ if (member.isArray())
+ {
+ propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.ARRAY);
+ }
+ else if (member.isCollection())
+ {
+ if (Map.class.equals(member.getCollectionClass()))
+ {
+ //hum subclasses etc etc??
+ propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.MAP);
+ }
+ else
+ {
+ propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.COLLECTION);
+ }
+ }
+ else
+ {
+ propertiesMetadata.embeddedContainers.add(PropertiesMetadata.Container.OBJECT);
+ }
- private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member) {
- String localPrefix = prefix;
- if ( ".".equals( embeddedAnn.prefix() ) ) {
- //default to property name
- localPrefix += member.getName() + '.';
- }
- else {
- localPrefix += embeddedAnn.prefix();
- }
- return localPrefix;
- }
+ processedClasses.remove(elementClass); //pop
+ }
+ else if (log.isTraceEnabled())
+ {
+ String localPrefix = buildEmbeddedPrefix(prefix, embeddedAnn, member);
+ log.trace("depth reached, ignoring {}", localPrefix);
+ }
- private Field.Store getStore(Store store) {
- switch ( store ) {
- case NO:
- return Field.Store.NO;
- case YES:
- return Field.Store.YES;
- case COMPRESS:
- return Field.Store.COMPRESS;
- default:
- throw new AssertionFailure( "Unexpected Store: " + store );
- }
- }
+ level--;
+ maxLevel = oldMaxLevel; //set back to the old max level
+ }
- private Field.TermVector getTermVector(TermVector vector) {
- switch ( vector ) {
- case NO:
- return Field.TermVector.NO;
- case YES:
- return Field.TermVector.YES;
- case WITH_OFFSETS:
- return Field.TermVector.WITH_OFFSETS;
- case WITH_POSITIONS:
- return Field.TermVector.WITH_POSITIONS;
- case WITH_POSITION_OFFSETS:
- return Field.TermVector.WITH_POSITIONS_OFFSETS;
- default:
- throw new AssertionFailure( "Unexpected TermVector: " + vector );
- }
- }
+ ContainedIn containedAnn = member.getAnnotation(ContainedIn.class);
+ if (containedAnn != null)
+ {
+ setAccessible(member);
+ propertiesMetadata.containedInGetters.add(member);
+ }
+ }
- private Field.Index getIndex(Index index) {
- switch ( index ) {
- case NO:
- return Field.Index.NO;
- case NO_NORMS:
- return Field.Index.NO_NORMS;
- case TOKENIZED:
- return Field.Index.TOKENIZED;
- case UN_TOKENIZED:
- return Field.Index.UN_TOKENIZED;
- default:
- throw new AssertionFailure( "Unexpected Index: " + index );
- }
- }
+ private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context)
+ {
+ //FIXME name should be prefixed
+ String fieldName = prefix + ann.name();
+ propertiesMetadata.classNames.add(fieldName);
+ propertiesMetadata.classStores.add(getStore(ann.store()));
+ propertiesMetadata.classIndexes.add(getIndex(ann.index()));
+ propertiesMetadata.classTermVectors.add(getTermVector(ann.termVector()));
+ propertiesMetadata.classBridges.add(BridgeFactory.extractType(ann));
+ propertiesMetadata.classBoosts.add(ann.boost().value());
- private Float getBoost(XAnnotatedElement element) {
- if ( element == null ) return null;
- Boost boost = element.getAnnotation( Boost.class );
- return boost != null ?
- boost.value() :
- null;
- }
+ Analyzer analyzer = getAnalyzer(ann.analyzer(), context);
+ if (analyzer == null) analyzer = propertiesMetadata.analyzer;
+ if (analyzer == null) throw new AssertionFailure("Analyzer should not be undefined");
+ this.analyzer.addScopedAnalyzer(fieldName, analyzer);
+ }
- private Object getMemberValue(Object bean, XMember getter) {
- Object value;
- try {
- value = getter.invoke( bean );
- }
- catch (Exception e) {
- throw new IllegalStateException( "Could not get property value", e );
- }
- return value;
- }
+ private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context)
+ {
+ setAccessible(member);
+ propertiesMetadata.fieldGetters.add(member);
+ String fieldName = prefix + BinderHelper.getAttributeName(member, fieldAnn.name());
+ propertiesMetadata.fieldNames.add(fieldName);
+ propertiesMetadata.fieldStore.add(getStore(fieldAnn.store()));
+ propertiesMetadata.fieldIndex.add(getIndex(fieldAnn.index()));
+ propertiesMetadata.fieldTermVectors.add(getTermVector(fieldAnn.termVector()));
+ propertiesMetadata.fieldBridges.add(BridgeFactory.guessType(fieldAnn, member, reflectionManager));
- //TODO could we use T instead of EntityClass?
- public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
- //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
- for (LuceneWork luceneWork : queue) {
- //any work on the same entity should be ignored
- if ( luceneWork.getEntityClass() == entityClass
- ) {
- Serializable currentId = luceneWork.getId();
- if ( currentId != null && currentId.equals( id ) ) { //find a way to use Type.equals(x,y)
- return;
- }
- //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
- }
+ // Field > property > entity analyzer
+ Analyzer analyzer = getAnalyzer(fieldAnn.analyzer(), context);
+ if (analyzer == null) analyzer = getAnalyzer(member, context);
+ if (analyzer == null) analyzer = propertiesMetadata.analyzer;
+ if (analyzer == null) throw new AssertionFailure("Analizer should not be undefined");
+ this.analyzer.addScopedAnalyzer(fieldName, analyzer);
+ }
- }
- boolean searchForContainers = false;
- String idInString = idBridge.objectToString( id );
- if ( workType == WorkType.ADD ) {
- Document doc = getDocument( entity, id );
- queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
- searchForContainers = true;
- }
- else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- }
- else if ( workType == WorkType.PURGE_ALL ) {
- queue.add( new PurgeAllLuceneWork( entityClass ) );
- }
- else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
- Document doc = getDocument( entity, id );
- /**
- * even with Lucene 2.1, use of indexWriter to update is not an option
- * We can only delete by term, and the index doesn't have a term that
- * uniquely identify the entry.
- * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
- * double file opening.
- */
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
- searchForContainers = true;
- }
- else if ( workType == WorkType.INDEX ) {
- Document doc = getDocument( entity, id );
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- LuceneWork work = new AddLuceneWork( id, idInString, entityClass, doc );
- work.setBatch( true );
- queue.add( work );
- searchForContainers = true;
- }
+ private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member)
+ {
+ String localPrefix = prefix;
+ if (".".equals(embeddedAnn.prefix()))
+ {
+ //default to property name
+ localPrefix += member.getName() + '.';
+ }
+ else
+ {
+ localPrefix += embeddedAnn.prefix();
+ }
+ return localPrefix;
+ }
- else {
- throw new AssertionFailure( "Unknown WorkType: " + workType );
- }
+ private Field.Store getStore(Store store)
+ {
+ switch (store)
+ {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure("Unexpected Store: " + store);
+ }
+ }
- /**
- * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
- * have to be updated)
- * When the internal object is changed, we apply the {Add|Update}Work on containedIns
- */
- if ( searchForContainers ) {
- processContainedIn( entity, queue, rootPropertiesMetadata, searchFactoryImplementor );
- }
- }
+ private Field.TermVector getTermVector(TermVector vector)
+ {
+ switch (vector)
+ {
+ case NO:
+ return Field.TermVector.NO;
+ case YES:
+ return Field.TermVector.YES;
+ case WITH_OFFSETS:
+ return Field.TermVector.WITH_OFFSETS;
+ case WITH_POSITIONS:
+ return Field.TermVector.WITH_POSITIONS;
+ case WITH_POSITION_OFFSETS:
+ return Field.TermVector.WITH_POSITIONS_OFFSETS;
+ default:
+ throw new AssertionFailure("Unexpected TermVector: " + vector);
+ }
+ }
- private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
- for (int i = 0; i < metadata.containedInGetters.size(); i++) {
- XMember member = metadata.containedInGetters.get( i );
- Object value = getMemberValue( instance, member );
- if ( value == null ) continue;
+ private Field.Index getIndex(Index index)
+ {
+ switch (index)
+ {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.TOKENIZED;
+ case UN_TOKENIZED:
+ return Field.Index.UN_TOKENIZED;
+ default:
+ throw new AssertionFailure("Unexpected Index: " + index);
+ }
+ }
- if ( member.isArray() ) {
- for (Object arrayValue : (Object[]) value) {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass( arrayValue );
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
- if ( builder == null ) continue;
- processContainedInValue( arrayValue, queue, valueClass, builder, searchFactoryImplementor );
- }
- }
- else if ( member.isCollection() ) {
- Collection collection;
- if ( Map.class.equals( member.getCollectionClass() ) ) {
- //hum
- collection = ( (Map) value ).values();
- }
- else {
- collection = (Collection) value;
- }
- for (Object collectionValue : collection) {
- //highly inneficient but safe wrt the actual targeted class
- Class valueClass = Hibernate.getClass( collectionValue );
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
- if ( builder == null ) continue;
- processContainedInValue( collectionValue, queue, valueClass, builder, searchFactoryImplementor );
- }
- }
- else {
- Class valueClass = Hibernate.getClass( value );
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( valueClass );
- if ( builder == null ) continue;
- processContainedInValue( value, queue, valueClass, builder, searchFactoryImplementor );
- }
- }
- //an embedded cannot have a useful @ContainedIn (no shared reference)
- //do not walk through them
- }
+ private Float getBoost(XAnnotatedElement element)
+ {
+ if (element == null) return null;
+ Boost boost = element.getAnnotation(Boost.class);
+ return boost != null ?
+ boost.value() :
+ null;
+ }
- private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
- DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor) {
- Serializable id = (Serializable) builder.getMemberValue( value, builder.idGetter );
- builder.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
- }
+ private Object getMemberValue(Object bean, XMember getter)
+ {
+ Object value;
+ try
+ {
+ value = getter.invoke(bean);
+ }
+ catch (Exception e)
+ {
+ throw new IllegalStateException("Could not get property value", e);
+ }
+ return value;
+ }
- public Document getDocument(T instance, Serializable id) {
- Document doc = new Document();
- XClass instanceClass = reflectionManager.toXClass( Hibernate.getClass( instance ) );
- if ( rootPropertiesMetadata.boost != null ) {
- doc.setBoost( rootPropertiesMetadata.boost );
- }
- {
- Field classField =
- new Field( CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO );
- doc.add( classField );
- LuceneOptions luceneOptions = new LuceneOptions(Field.Store.YES,
- Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost);
- idBridge.set( idKeywordName, id, doc, luceneOptions );
- }
- buildDocumentFields( instance, doc, rootPropertiesMetadata );
- return doc;
- }
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor)
+ {
+ //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work recognition
+ for (LuceneWork luceneWork : queue)
+ {
+ //any work on the same entity should be ignored
+ if (luceneWork.getEntityClass() == entityClass
+ )
+ {
+ Serializable currentId = luceneWork.getId();
+ if (currentId != null && currentId.equals(id))
+ { //find a way to use Type.equals(x,y)
+ return;
+ }
+ //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
+ }
- private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata) {
- if ( instance == null ) return;
- //needed for field access: I cannot work in the proxied version
- Object unproxiedInstance = unproxy( instance );
- for (int i = 0; i < propertiesMetadata.classBridges.size(); i++) {
- FieldBridge fb = propertiesMetadata.classBridges.get( i );
- fb.set(propertiesMetadata.classNames.get(i), unproxiedInstance,
- doc, propertiesMetadata.getClassLuceneOptions(i));
- }
- for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++) {
- XMember member = propertiesMetadata.fieldGetters.get( i );
- Object value = getMemberValue( unproxiedInstance, member );
- propertiesMetadata.fieldBridges.get(i).set(
- propertiesMetadata.fieldNames.get(i), value, doc,
- propertiesMetadata.getFieldLuceneOptions(i, getBoost(member)));
- }
- for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++) {
- XMember member = propertiesMetadata.embeddedGetters.get( i );
- Object value = getMemberValue( unproxiedInstance, member );
- //TODO handle boost at embedded level: already stored in propertiesMedatada.boost
+ }
+ boolean searchForContainers = false;
+ String idInString = idBridge.objectToString(id);
+ if (workType == WorkType.ADD)
+ {
+ Document doc = getDocument(entity, id);
+ queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
+ searchForContainers = true;
+ }
+ else if (workType == WorkType.DELETE || workType == WorkType.PURGE)
+ {
+ queue.add(new DeleteLuceneWork(id, idInString, entityClass));
+ }
+ else if (workType == WorkType.PURGE_ALL)
+ {
+ queue.add(new PurgeAllLuceneWork(entityClass));
+ }
+ else if (workType == WorkType.UPDATE || workType == WorkType.COLLECTION)
+ {
+ Document doc = getDocument(entity, id);
+ /**
+ * even with Lucene 2.1, use of indexWriter to update is not an option
+ * We can only delete by term, and the index doesn't have a term that
+ * uniquely identifies the entry.
+ * But essentially the optimization we are doing is the same Lucene is doing, the only extra cost is the
+ * double file opening.
+ */
+ queue.add(new DeleteLuceneWork(id, idInString, entityClass));
+ queue.add(new AddLuceneWork(id, idInString, entityClass, doc));
+ searchForContainers = true;
+ }
+ else if (workType == WorkType.INDEX)
+ {
+ Document doc = getDocument(entity, id);
+ queue.add(new DeleteLuceneWork(id, idInString, entityClass));
+ LuceneWork work = new AddLuceneWork(id, idInString, entityClass, doc);
+ work.setBatch(true);
+ queue.add(work);
+ searchForContainers = true;
+ }
- if ( value == null ) continue;
- PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get( i );
- switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
- case ARRAY:
- for (Object arrayValue : (Object[]) value) {
- buildDocumentFields( arrayValue, doc, embeddedMetadata );
- }
- break;
- case COLLECTION:
- for (Object collectionValue : (Collection) value) {
- buildDocumentFields( collectionValue, doc, embeddedMetadata );
- }
- break;
- case MAP:
- for (Object collectionValue : ( (Map) value ).values()) {
- buildDocumentFields( collectionValue, doc, embeddedMetadata );
- }
- break;
- case OBJECT:
- buildDocumentFields( value, doc, embeddedMetadata );
- break;
- default:
- throw new AssertionFailure( "Unknown embedded container: "
- + propertiesMetadata.embeddedContainers.get( i ) );
- }
- }
- }
+ else
+ {
+ throw new AssertionFailure("Unknown WorkType: " + workType);
+ }
- private Object unproxy(Object value) {
- //FIXME this service should be part of Core?
- if ( value instanceof HibernateProxy ) {
- // .getImplementation() initializes the data by side effect
- value = ( (HibernateProxy) value ).getHibernateLazyInitializer()
- .getImplementation();
- }
- return value;
- }
+ /**
+ * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
+ * have to be updated)
+ * When the internal object is changed, we apply the {Add|Update}Work on containedIns
+ */
+ if (searchForContainers)
+ {
+ processContainedIn(entity, queue, rootPropertiesMetadata, searchFactoryImplementor);
+ }
+ }
- public Term getTerm(Serializable id) {
- return new Term( idKeywordName, idBridge.objectToString( id ) );
- }
+ private void processContainedIn(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor)
+ {
+ for (int i = 0; i < metadata.containedInGetters.size(); i++)
+ {
+ XMember member = metadata.containedInGetters.get(i);
+ Object value = getMemberValue(instance, member);
+ if (value == null) continue;
- public DirectoryProvider[] getDirectoryProviders() {
- return directoryProviders;
- }
+ if (member.isArray())
+ {
+ for (Object arrayValue : (Object[]) value)
+ {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass(arrayValue);
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
+ if (builder == null) continue;
+ processContainedInValue(arrayValue, queue, valueClass, builder, searchFactoryImplementor);
+ }
+ }
+ else if (member.isCollection())
+ {
+ Collection collection;
+ if (Map.class.equals(member.getCollectionClass()))
+ {
+ //hum
+ collection = ((Map) value).values();
+ }
+ else
+ {
+ collection = (Collection) value;
+ }
+ for (Object collectionValue : collection)
+ {
+ //highly inefficient but safe wrt the actual targeted class
+ Class valueClass = Hibernate.getClass(collectionValue);
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
+ if (builder == null) continue;
+ processContainedInValue(collectionValue, queue, valueClass, builder, searchFactoryImplementor);
+ }
+ }
+ else
+ {
+ Class valueClass = Hibernate.getClass(value);
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(valueClass);
+ if (builder == null) continue;
+ processContainedInValue(value, queue, valueClass, builder, searchFactoryImplementor);
+ }
+ }
+ //an embedded cannot have a useful @ContainedIn (no shared reference)
+ //do not walk through them
+ }
- public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
- return shardingStrategy;
- }
+ private void processContainedInValue(Object value, List<LuceneWork> queue, Class valueClass,
+ DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor)
+ {
+ Serializable id = (Serializable) builder.getMemberValue(value, builder.idGetter);
+ builder.addWorkToQueue(valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor);
+ }
- public Analyzer getAnalyzer() {
- return analyzer;
- }
+ public Document getDocument(T instance, Serializable id)
+ {
+ Document doc = new Document();
+ XClass instanceClass = reflectionManager.toXClass(Hibernate.getClass(instance));
+ if (rootPropertiesMetadata.boost != null)
+ {
+ doc.setBoost(rootPropertiesMetadata.boost);
+ }
+ {
+ Field classField =
+ new Field(CLASS_FIELDNAME, instanceClass.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO);
+ doc.add(classField);
+ LuceneOptions luceneOptions = new LuceneOptions(Field.Store.YES,
+ Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost);
+ idBridge.set(idKeywordName, id, doc, luceneOptions);
+ }
+ buildDocumentFields(instance, doc, rootPropertiesMetadata);
+ return doc;
+ }
- private static void setAccessible(XMember member) {
- if ( !Modifier.isPublic( member.getModifiers() ) ) {
- member.setAccessible( true );
- }
- }
+ private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata)
+ {
+ if (instance == null) return;
+ //needed for field access: I cannot work in the proxied version
+ Object unproxiedInstance = unproxy(instance);
+ for (int i = 0; i < propertiesMetadata.classBridges.size(); i++)
+ {
+ FieldBridge fb = propertiesMetadata.classBridges.get(i);
+ fb.set(propertiesMetadata.classNames.get(i), unproxiedInstance,
+ doc, propertiesMetadata.getClassLuceneOptions(i));
+ }
+ for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++)
+ {
+ XMember member = propertiesMetadata.fieldGetters.get(i);
+ Object value = getMemberValue(unproxiedInstance, member);
+ propertiesMetadata.fieldBridges.get(i).set(
+ propertiesMetadata.fieldNames.get(i), value, doc,
+ propertiesMetadata.getFieldLuceneOptions(i, getBoost(member)));
+ }
+ for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++)
+ {
+ XMember member = propertiesMetadata.embeddedGetters.get(i);
+ Object value = getMemberValue(unproxiedInstance, member);
+ //TODO handle boost at embedded level: already stored in propertiesMetadata.boost
- public TwoWayFieldBridge getIdBridge() {
- return idBridge;
- }
+ if (value == null) continue;
+ PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get(i);
+ switch (propertiesMetadata.embeddedContainers.get(i))
+ {
+ case ARRAY:
+ for (Object arrayValue : (Object[]) value)
+ {
+ buildDocumentFields(arrayValue, doc, embeddedMetadata);
+ }
+ break;
+ case COLLECTION:
+ for (Object collectionValue : (Collection) value)
+ {
+ buildDocumentFields(collectionValue, doc, embeddedMetadata);
+ }
+ break;
+ case MAP:
+ for (Object collectionValue : ((Map) value).values())
+ {
+ buildDocumentFields(collectionValue, doc, embeddedMetadata);
+ }
+ break;
+ case OBJECT:
+ buildDocumentFields(value, doc, embeddedMetadata);
+ break;
+ default:
+ throw new AssertionFailure("Unknown embedded container: "
+ + propertiesMetadata.embeddedContainers.get(i));
+ }
+ }
+ }
- public String getIdKeywordName() {
- return idKeywordName;
- }
+ private Object unproxy(Object value)
+ {
+ //FIXME this service should be part of Core?
+ if (value instanceof HibernateProxy)
+ {
+ // .getImplementation() initializes the data by side effect
+ value = ((HibernateProxy) value).getHibernateLazyInitializer()
+ .getImplementation();
+ }
+ return value;
+ }
- public static Class getDocumentClass(Document document) {
- String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
- try {
- return ReflectHelper.classForName( className );
- }
- catch (ClassNotFoundException e) {
- throw new SearchException( "Unable to load indexed class: " + className, e );
- }
- }
+ public Term getTerm(Serializable id)
+ {
+ if (idProvided)
+ {
+ return new Term(idKeywordName, (String) id);
+ }
- public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document) {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
- if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
- return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document );
- }
+ return new Term(idKeywordName, idBridge.objectToString(id));
+ }
- public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields) {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
- if ( builder == null ) throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
- final int fieldNbr = fields.length;
- Object[] result = new Object[fieldNbr];
+ public DirectoryProvider[] getDirectoryProviders()
+ {
+ return directoryProviders;
+ }
- if ( builder.idKeywordName != null ) {
- populateResult( builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document );
- }
+ public IndexShardingStrategy getDirectoryProviderSelectionStrategy()
+ {
+ return shardingStrategy;
+ }
- final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
- processFieldsForProjection( metadata, fields, result, document );
- return result;
- }
+ public Analyzer getAnalyzer()
+ {
+ return analyzer;
+ }
- private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document) {
- final int nbrFoEntityFields = metadata.fieldNames.size();
- for (int index = 0; index < nbrFoEntityFields; index++) {
- populateResult( metadata.fieldNames.get( index ),
- metadata.fieldBridges.get( index ),
- metadata.fieldStore.get( index ),
- fields,
- result,
- document
- );
- }
- final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
- for (int index = 0; index < nbrOfEmbeddedObjects; index++) {
- //there is nothing we can do for collections
- if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT ) {
- processFieldsForProjection( metadata.embeddedPropertiesMetadata.get( index ), fields, result, document );
- }
- }
- }
+ private static void setAccessible(XMember member)
+ {
+ if (!Modifier.isPublic(member.getModifiers()))
+ {
+ member.setAccessible(true);
+ }
+ }
- private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
- String[] fields, Object[] result, Document document) {
- int matchingPosition = getFieldPosition( fields, fieldName );
- if ( matchingPosition != -1 ) {
- //TODO make use of an isTwoWay() method
- if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom( fieldBridge.getClass() ) ) {
- result[matchingPosition] = ( (TwoWayFieldBridge) fieldBridge ).get( fieldName, document );
- if ( log.isTraceEnabled() ) {
- log.trace( "Field {} projected as {}", fieldName, result[matchingPosition] );
- }
- }
- else {
- if ( store == Field.Store.NO ) {
- throw new SearchException( "Projecting an unstored field: " + fieldName );
- }
- else {
- throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass() );
- }
- }
- }
- }
+ public TwoWayFieldBridge getIdBridge()
+ {
+ return idBridge;
+ }
- private static int getFieldPosition(String[] fields, String fieldName) {
- int fieldNbr = fields.length;
- for (int index = 0; index < fieldNbr; index++) {
- if ( fieldName.equals( fields[index] ) ) return index;
- }
- return -1;
- }
+ public String getIdKeywordName()
+ {
+ return idKeywordName;
+ }
- public void postInitialize(Set<Class> indexedClasses) {
- //this method does not requires synchronization
- Class plainClass = reflectionManager.toClass( beanClass );
- Set<Class> tempMappedSubclasses = new HashSet<Class>();
- //together with the caller this creates a o(2), but I think it's still faster than create the up hierarchy for each class
- for (Class currentClass : indexedClasses) {
- if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add( currentClass );
- }
- this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
- Class superClass = plainClass.getSuperclass();
- this.isRoot = true;
- while ( superClass != null) {
- if ( indexedClasses.contains( superClass ) ) {
- this.isRoot = false;
- break;
- }
- superClass = superClass.getSuperclass();
- }
- }
+ public static Class getDocumentClass(Document document)
+ {
+ String className = document.get(DocumentBuilder.CLASS_FIELDNAME);
+ try
+ {
+ return ReflectHelper.classForName(className);
+ }
+ catch (ClassNotFoundException e)
+ {
+ throw new SearchException("Unable to load indexed class: " + className, e);
+ }
+ }
+ public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document)
+ {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
+ if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
+ return (Serializable) builder.getIdBridge().get(builder.getIdKeywordName(), document);
+ }
- public Set<Class> getMappedSubclasses() {
- return mappedSubclasses;
- }
+ public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class clazz, Document document, String[] fields)
+ {
+ DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilders().get(clazz);
+ if (builder == null) throw new SearchException("No Lucene configuration set up for: " + clazz.getName());
+ final int fieldNbr = fields.length;
+ Object[] result = new Object[fieldNbr];
- /**
- * Make sure to return false if there is a risk of composite id
- * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- */
- public boolean isSafeFromTupleId() {
- return safeFromTupleId;
- }
+ if (builder.idKeywordName != null)
+ {
+ populateResult(builder.idKeywordName, builder.idBridge, Field.Store.YES, fields, result, document);
+ }
- /**
- * Wrapper class containing all the meta data extracted out of the entities.
- */
- private static class PropertiesMetadata {
- public Float boost;
- public Analyzer analyzer;
- public final List<String> fieldNames = new ArrayList<String>();
- public final List<XMember> fieldGetters = new ArrayList<XMember>();
- public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
- public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
- public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
- public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
- public final List<XMember> embeddedGetters = new ArrayList<XMember>();
- public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
- public final List<Container> embeddedContainers = new ArrayList<Container>();
- public final List<XMember> containedInGetters = new ArrayList<XMember>();
- public final List<String> classNames = new ArrayList<String>();
- public final List<Field.Store> classStores = new ArrayList<Field.Store>();
- public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
- public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
- public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
- public final List<Float> classBoosts = new ArrayList<Float>();
+ final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
+ processFieldsForProjection(metadata, fields, result, document);
+ return result;
+ }
- public enum Container {
- OBJECT,
- COLLECTION,
- MAP,
- ARRAY
- }
-
- private LuceneOptions getClassLuceneOptions(int i) {
- LuceneOptions options = new LuceneOptions(classStores.get(i),
- classIndexes.get(i), classTermVectors.get(i), classBoosts.get(i));
- return options;
- }
-
- private LuceneOptions getFieldLuceneOptions(int i, Float boost) {
- LuceneOptions options = new LuceneOptions(fieldStore.get(i),
- fieldIndex.get(i), fieldTermVectors.get(i), boost);
- return options;
- }
- }
+ private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document)
+ {
+ final int nbrFoEntityFields = metadata.fieldNames.size();
+ for (int index = 0; index < nbrFoEntityFields; index++)
+ {
+ populateResult(metadata.fieldNames.get(index),
+ metadata.fieldBridges.get(index),
+ metadata.fieldStore.get(index),
+ fields,
+ result,
+ document
+ );
+ }
+ final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
+ for (int index = 0; index < nbrOfEmbeddedObjects; index++)
+ {
+ //there is nothing we can do for collections
+ if (metadata.embeddedContainers.get(index) == PropertiesMetadata.Container.OBJECT)
+ {
+ processFieldsForProjection(metadata.embeddedPropertiesMetadata.get(index), fields, result, document);
+ }
+ }
+ }
+
+ private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
+ String[] fields, Object[] result, Document document)
+ {
+ int matchingPosition = getFieldPosition(fields, fieldName);
+ if (matchingPosition != -1)
+ {
+ //TODO make use of an isTwoWay() method
+ if (store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom(fieldBridge.getClass()))
+ {
+ result[matchingPosition] = ((TwoWayFieldBridge) fieldBridge).get(fieldName, document);
+ if (log.isTraceEnabled())
+ {
+ log.trace("Field {} projected as {}", fieldName, result[matchingPosition]);
+ }
+ }
+ else
+ {
+ if (store == Field.Store.NO)
+ {
+ throw new SearchException("Projecting an unstored field: " + fieldName);
+ }
+ else
+ {
+ throw new SearchException("FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass());
+ }
+ }
+ }
+ }
+
+ private static int getFieldPosition(String[] fields, String fieldName)
+ {
+ int fieldNbr = fields.length;
+ for (int index = 0; index < fieldNbr; index++)
+ {
+ if (fieldName.equals(fields[index])) return index;
+ }
+ return -1;
+ }
+
+ public void postInitialize(Set<Class> indexedClasses)
+ {
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass(beanClass);
+ Set<Class> tempMappedSubclasses = new HashSet<Class>();
+ //together with the caller this creates an O(2) pass, but I think it's still faster than creating the up hierarchy for each class
+ for (Class currentClass : indexedClasses)
+ {
+ if (plainClass.isAssignableFrom(currentClass)) tempMappedSubclasses.add(currentClass);
+ }
+ this.mappedSubclasses = Collections.unmodifiableSet(tempMappedSubclasses);
+ Class superClass = plainClass.getSuperclass();
+ this.isRoot = true;
+ while (superClass != null)
+ {
+ if (indexedClasses.contains(superClass))
+ {
+ this.isRoot = false;
+ break;
+ }
+ superClass = superClass.getSuperclass();
+ }
+ }
+
+
+ public Set<Class> getMappedSubclasses()
+ {
+ return mappedSubclasses;
+ }
+
+ /**
+ * Make sure to return false if there is a risk of composite id
+ * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most databases
+ */
+ public boolean isSafeFromTupleId()
+ {
+ return safeFromTupleId;
+ }
+
+ /**
+ * Wrapper class containing all the meta data extracted out of the entities.
+ */
+ private static class PropertiesMetadata
+ {
+ public Float boost;
+ public Analyzer analyzer;
+ public final List<String> fieldNames = new ArrayList<String>();
+ public final List<XMember> fieldGetters = new ArrayList<XMember>();
+ public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
+ public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+ public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
+ public final List<XMember> embeddedGetters = new ArrayList<XMember>();
+ public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
+ public final List<Container> embeddedContainers = new ArrayList<Container>();
+ public final List<XMember> containedInGetters = new ArrayList<XMember>();
+ public final List<String> classNames = new ArrayList<String>();
+ public final List<Field.Store> classStores = new ArrayList<Field.Store>();
+ public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
+ public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
+ public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
+ public final List<Float> classBoosts = new ArrayList<Float>();
+
+ public enum Container
+ {
+ OBJECT,
+ COLLECTION,
+ MAP,
+ ARRAY
+ }
+
+ private LuceneOptions getClassLuceneOptions(int i)
+ {
+ LuceneOptions options = new LuceneOptions(classStores.get(i),
+ classIndexes.get(i), classTermVectors.get(i), classBoosts.get(i));
+ return options;
+ }
+
+ private LuceneOptions getFieldLuceneOptions(int i, Float boost)
+ {
+ LuceneOptions options = new LuceneOptions(fieldStore.get(i),
+ fieldIndex.get(i), fieldTermVectors.get(i), boost);
+ return options;
+ }
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -26,6 +26,9 @@
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.transaction.EventSourceTransactionContext;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -41,109 +44,148 @@
//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
@SuppressWarnings("serial")
public class FullTextIndexEventListener implements PostDeleteEventListener,
- PostInsertEventListener, PostUpdateEventListener,
- PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
- PostCollectionUpdateEventListener, Initializable, Destructible {
+ PostInsertEventListener, PostUpdateEventListener,
+ PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener, Initializable, Destructible
+{
- private static final Logger log = LoggerFactory.getLogger(FullTextIndexEventListener.class);
-
- protected boolean used;
- protected SearchFactoryImplementor searchFactoryImplementor;
+ private static final Logger log = LoggerFactory.getLogger(FullTextIndexEventListener.class);
- public void initialize(Configuration cfg) {
- searchFactoryImplementor = SearchFactoryImpl.getSearchFactory( cfg );
- String indexingStrategy = searchFactoryImplementor.getIndexingStrategy();
- if ( "event".equals( indexingStrategy ) ) {
- used = searchFactoryImplementor.getDocumentBuilders().size() != 0;
- }
- else if ( "manual".equals( indexingStrategy ) ) {
- used = false;
- }
- }
+ protected boolean used;
+ protected SearchFactoryImplementor searchFactoryImplementor;
- public SearchFactoryImplementor getSearchFactoryImplementor() {
- return searchFactoryImplementor;
- }
+ /**
+ * Method that uses the SearchConfiguration interface rather than working directly with Hibernate Core. This was edited
+ * for the JBoss Cache integration.
+ *
+ * @param searchConfiguration - so that it calls the same methods on the interface.
+ */
- public void onPostDelete(PostDeleteEvent event) {
- if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( event.getEntity().getClass() ) ) {
- processWork( event.getEntity(), event.getId(), WorkType.DELETE, event );
- }
- }
+ public void initialize(SearchConfiguration searchConfiguration)
+ {
- public void onPostInsert(PostInsertEvent event) {
- if (used) {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entity.getClass() );
- //not strictly necessary but a small optimization
- if ( builder != null ) {
- Serializable id = event.getId();
- processWork( entity, id, WorkType.ADD, event );
- }
- }
- }
+ searchFactoryImplementor = SearchFactoryImpl.getSearchFactory(searchConfiguration);
+ String indexingStrategy = searchFactoryImplementor.getIndexingStrategy();
+ if ("event".equals(indexingStrategy))
+ {
+ used = searchFactoryImplementor.getDocumentBuilders().size() != 0;
+ }
+ else if ("manual".equals(indexingStrategy))
+ {
+ used = false;
+ }
+ }
- public void onPostUpdate(PostUpdateEvent event) {
- if (used) {
- final Object entity = event.getEntity();
- //not strictly necessary but a small optimization
- DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entity.getClass() );
- if ( builder != null ) {
- Serializable id = event.getId();
- processWork( entity, id, WorkType.UPDATE, event );
- }
- }
- }
+ public void initialize(Configuration cfg)
+ {
+ SearchConfiguration searchConfig = new SearchConfigurationFromHibernateCore(cfg);
+ initialize(searchConfig);
- protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
- Work work = new Work(entity, id, workType);
- searchFactoryImplementor.getWorker().performWork( work, event.getSession() );
- }
+ }
- public void cleanup() {
- searchFactoryImplementor.close();
- }
-
- public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
- processCollectionEvent( event );
- }
-
- public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
- processCollectionEvent( event );
- }
+ public SearchFactoryImplementor getSearchFactoryImplementor()
+ {
+ return searchFactoryImplementor;
+ }
- public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
- processCollectionEvent( event );
- }
-
- protected void processCollectionEvent(AbstractCollectionEvent event) {
- Object entity = event.getAffectedOwnerOrNull();
- if ( entity == null ) {
- //Hibernate cannot determine every single time the owner especially in case detached objects are involved
- // or property-ref is used
- //Should log really but we don't know if we're interested in this collection for indexing
- return;
- }
- if ( used && searchFactoryImplementor.getDocumentBuilders().containsKey( entity.getClass() ) ) {
- Serializable id = getId( entity, event );
- if (id == null) {
- log.warn(
- "Unable to reindex entity on collection change, id cannot be extracted: {}",
- event.getAffectedOwnerEntityName()
- );
- return;
- }
- processWork( entity, id, WorkType.COLLECTION, event );
- }
- }
+ public void onPostDelete(PostDeleteEvent event)
+ {
+ if (used && searchFactoryImplementor.getDocumentBuilders().containsKey(event.getEntity().getClass()))
+ {
+ processWork(event.getEntity(), event.getId(), WorkType.DELETE, event);
+ }
+ }
- private Serializable getId(Object entity, AbstractCollectionEvent event) {
- Serializable id = event.getAffectedOwnerIdOrNull();
- if ( id == null ) {
- //most likely this recovery is unnecessary since Hibernate Core probably try that
- EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( entity );
- id = entityEntry == null ? null : entityEntry.getId();
- }
- return id;
- }
+ public void onPostInsert(PostInsertEvent event)
+ {
+ if (used)
+ {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get(entity.getClass());
+ //not strictly necessary but a small optimization
+ if (builder != null)
+ {
+ Serializable id = event.getId();
+ processWork(entity, id, WorkType.ADD, event);
+ }
+ }
+ }
+
+ public void onPostUpdate(PostUpdateEvent event)
+ {
+ if (used)
+ {
+ final Object entity = event.getEntity();
+ //not strictly necessary but a small optimization
+ DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get(entity.getClass());
+ if (builder != null)
+ {
+ Serializable id = event.getId();
+ processWork(entity, id, WorkType.UPDATE, event);
+ }
+ }
+ }
+
+ protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event)
+ {
+ Work work = new Work(entity, id, workType);
+ searchFactoryImplementor.getWorker().performWork(work, new EventSourceTransactionContext(event.getSession()));
+ }
+
+ public void cleanup()
+ {
+ searchFactoryImplementor.close();
+ }
+
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event)
+ {
+ processCollectionEvent(event);
+ }
+
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event)
+ {
+ processCollectionEvent(event);
+ }
+
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event)
+ {
+ processCollectionEvent(event);
+ }
+
+ protected void processCollectionEvent(AbstractCollectionEvent event)
+ {
+ Object entity = event.getAffectedOwnerOrNull();
+ if (entity == null)
+ {
+ //Hibernate cannot determine every single time the owner especially in case detached objects are involved
+ // or property-ref is used
+ //Should log really but we don't know if we're interested in this collection for indexing
+ return;
+ }
+ if (used && searchFactoryImplementor.getDocumentBuilders().containsKey(entity.getClass()))
+ {
+ Serializable id = getId(entity, event);
+ if (id == null)
+ {
+ log.warn(
+ "Unable to reindex entity on collection change, id cannot be extracted: {}",
+ event.getAffectedOwnerEntityName()
+ );
+ return;
+ }
+ processWork(entity, id, WorkType.COLLECTION, event);
+ }
+ }
+
+ private Serializable getId(Object entity, AbstractCollectionEvent event)
+ {
+ Serializable id = event.getAffectedOwnerIdOrNull();
+ if (id == null)
+ {
+ //most likely this recovery is unnecessary since Hibernate Core probably tries that
+ EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry(entity);
+ id = entityEntry == null ? null : entityEntry.getId();
+ }
+ return id;
+ }
}
Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -43,6 +43,8 @@
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.SearchFactory;
+import org.hibernate.search.transaction.TransactionContext;
+import org.hibernate.search.transaction.EventSourceTransactionContext;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.engine.DocumentBuilder;
@@ -62,13 +64,14 @@
@SuppressWarnings({"serial", "unchecked"})
public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
private final Session session;
- private final EventSource eventSource;
private final SessionImplementor sessionImplementor;
private transient SearchFactoryImplementor searchFactory;
+ private final TransactionContext transactionContext;
- public FullTextSessionImpl(org.hibernate.Session session) {
+
+ public FullTextSessionImpl(org.hibernate.Session session) {
this.session = (Session) session;
- this.eventSource = (EventSource) session;
+ this.transactionContext = new EventSourceTransactionContext((EventSource) session);
this.sessionImplementor = (SessionImplementor) session;
}
@@ -94,7 +97,7 @@
public void flushToIndexes() {
SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
- searchFactoryImplementor.getWorker().flushWorks(eventSource);
+ searchFactoryImplementor.getWorker().flushWorks(transactionContext);
}
/**
@@ -124,7 +127,7 @@
type = WorkType.PURGE;
}
Work work = new Work(entityType, id, type);
- searchFactoryImplementor.getWorker().performWork( work, eventSource );
+ searchFactoryImplementor.getWorker().performWork( work, transactionContext );
}
/**
@@ -147,7 +150,7 @@
}
Serializable id = session.getIdentifier( entity );
Work work = new Work(entity, id, WorkType.INDEX);
- searchFactoryImplementor.getWorker().performWork( work, eventSource );
+ searchFactoryImplementor.getWorker().performWork( work, transactionContext );
//TODO
//need to add elements in a queue kept at the Session level
Modified: search/trunk/src/java/org/hibernate/search/impl/InitContext.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/InitContext.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/impl/InitContext.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -13,8 +13,8 @@
import org.hibernate.search.annotations.TokenFilterDef;
import org.hibernate.search.SearchException;
import org.hibernate.search.Environment;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.util.DelegateNamedAnalyzer;
-import org.hibernate.cfg.Configuration;
import org.hibernate.util.ReflectHelper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -32,7 +32,7 @@
private final Analyzer defaultAnalyzer;
private final Similarity defaultSimilarity;
- public InitContext(Configuration cfg) {
+ public InitContext(SearchConfiguration cfg) {
defaultAnalyzer = initAnalyzer(cfg);
defaultSimilarity = initSimilarity(cfg);
}
@@ -63,7 +63,7 @@
* The current configuration.
* @return The Lucene analyzer to use for tokenisation.
*/
- private Analyzer initAnalyzer(Configuration cfg) {
+ private Analyzer initAnalyzer(SearchConfiguration cfg) {
Class analyzerClass;
String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS);
if (analyzerClassName != null) {
@@ -93,7 +93,7 @@
/**
* Initializes the Lucene similarity to use
*/
- private Similarity initSimilarity(Configuration cfg) {
+ private Similarity initSimilarity(SearchConfiguration cfg) {
Class similarityClass;
String similarityClassName = cfg.getProperty(Environment.SIMILARITY_CLASS);
if (similarityClassName != null) {
Modified: search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -22,11 +22,11 @@
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
import org.hibernate.annotations.common.util.StringHelper;
-import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
import org.hibernate.search.Version;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.annotations.Factory;
import org.hibernate.search.annotations.FullTextFilterDef;
import org.hibernate.search.annotations.FullTextFilterDefs;
@@ -58,8 +58,8 @@
*/
@SuppressWarnings("unchecked")
public class SearchFactoryImpl implements SearchFactoryImplementor {
- private static final ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>> contexts =
- new ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>>();
+ private static final ThreadLocal<WeakHashMap<SearchConfiguration, SearchFactoryImpl>> contexts =
+ new ThreadLocal<WeakHashMap<SearchConfiguration, SearchFactoryImpl>>();
static {
Version.touch();
@@ -96,7 +96,7 @@
}
@SuppressWarnings( "unchecked" )
- public SearchFactoryImpl(Configuration cfg) {
+ public SearchFactoryImpl(SearchConfiguration cfg) {
//yuk
ReflectionManager reflectionManager = getReflectionManager( cfg );
this.indexingStrategy = defineIndexingStrategy( cfg ); //need to be done before the document builds
@@ -113,7 +113,7 @@
this.cachingWrapperFilterSize = ConfigurationParseHelper.getIntValue( cfg.getProperties(), Environment.CACHING_WRAPPER_FILTER_SIZE, CachingWrapperFilter.DEFAULT_SIZE );
}
- private static String defineIndexingStrategy(Configuration cfg) {
+ private static String defineIndexingStrategy(SearchConfiguration cfg) {
String indexingStrategy = cfg.getProperties().getProperty( Environment.INDEXING_STRATEGY, "event" );
if ( ! ("event".equals( indexingStrategy ) || "manual".equals( indexingStrategy ) ) ) {
throw new SearchException( Environment.INDEXING_STRATEGY + " unknown: " + indexingStrategy );
@@ -218,10 +218,10 @@
//code doesn't have to be multithreaded because SF creation is not.
//this is not a public API, should really only be used during the SessionFActory building
//FIXME this is ugly, impl.staticmethod, fix that
- public static SearchFactoryImpl getSearchFactory(Configuration cfg) {
- WeakHashMap<Configuration, SearchFactoryImpl> contextMap = contexts.get();
+ public static SearchFactoryImpl getSearchFactory(SearchConfiguration cfg) {
+ WeakHashMap<SearchConfiguration, SearchFactoryImpl> contextMap = contexts.get();
if ( contextMap == null ) {
- contextMap = new WeakHashMap<Configuration, SearchFactoryImpl>( 2 );
+ contextMap = new WeakHashMap<SearchConfiguration, SearchFactoryImpl>( 2 );
contexts.set( contextMap );
}
SearchFactoryImpl searchFactory = contextMap.get( cfg );
@@ -271,7 +271,7 @@
}
//not happy about having it as a helper class but I don't want cfg to be associated with the SearchFactory
- public static ReflectionManager getReflectionManager(Configuration cfg) {
+ public static ReflectionManager getReflectionManager(SearchConfiguration cfg) {
ReflectionManager reflectionManager;
try {
//TODO introduce a ReflectionManagerHolder interface to avoid reflection
@@ -314,7 +314,7 @@
return analyzer;
}
- private void initDocumentBuilders(Configuration cfg, ReflectionManager reflectionManager) {
+ private void initDocumentBuilders(SearchConfiguration cfg, ReflectionManager reflectionManager) {
InitContext context = new InitContext( cfg );
Iterator iter = cfg.getClassMappings();
DirectoryProviderFactory factory = new DirectoryProviderFactory();
Modified: search/trunk/src/java/org/hibernate/search/reader/ReaderProviderFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/reader/ReaderProviderFactory.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/reader/ReaderProviderFactory.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -4,9 +4,9 @@
import java.util.Map;
import java.util.Properties;
-import org.hibernate.cfg.Configuration;
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
@@ -16,7 +16,7 @@
*/
public abstract class ReaderProviderFactory {
- private static Properties getProperties(Configuration cfg) {
+ private static Properties getProperties(SearchConfiguration cfg) {
Properties props = cfg.getProperties();
Properties workerProperties = new Properties();
for (Map.Entry entry : props.entrySet()) {
@@ -28,7 +28,7 @@
return workerProperties;
}
- public static ReaderProvider createReaderProvider(Configuration cfg, SearchFactoryImplementor searchFactoryImplementor) {
+ public static ReaderProvider createReaderProvider(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor) {
Properties props = getProperties( cfg );
String impl = props.getProperty( Environment.READER_STRATEGY );
ReaderProvider readerProvider;
Modified: search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-14 16:11:15 UTC (rev 14927)
+++ search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -8,9 +8,9 @@
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.annotations.common.reflection.XClass;
-import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.search.SearchException;
+import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.LuceneIndexingParameters;
import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
@@ -28,8 +28,8 @@
* <p/>
* Lucene directory providers are configured through properties
* <ul>
- * <li>hibernate.search.default.* and</li>
- * <li>hibernate.search.<indexname>.*</li>
+ * <li>hibernate.search.default.* and</li>
+ * <li>hibernate.search.&lt;indexname&gt;.*</li>
* </ul>
* <p/>
* <indexname> properties have precedence over default
@@ -44,229 +44,272 @@
* @author Hardy Ferentschik
* @author Sanne Grinovero
*/
-public class DirectoryProviderFactory {
-
- private final List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
- private static final String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
-
- private static final String SHARDING_STRATEGY = "sharding_strategy";
- private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
+public class DirectoryProviderFactory
+{
- public DirectoryProviders createDirectoryProviders(XClass entity, Configuration cfg,
- SearchFactoryImplementor searchFactoryImplementor,
- ReflectionManager reflectionManager) {
- //get properties
- String directoryProviderName = getDirectoryProviderName( entity, cfg );
- Properties[] indexProps = getDirectoryProperties( cfg, directoryProviderName );
+ private final List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
+ private static final String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
- //set up the directories
- int nbrOfProviders = indexProps.length;
- DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
- for ( int index = 0 ; index < nbrOfProviders ; index++ ) {
- String providerName = nbrOfProviders > 1 ?
- directoryProviderName + "." + index :
- directoryProviderName;
- providers[index] = createDirectoryProvider( providerName, indexProps[index],
- reflectionManager.toClass( entity ), searchFactoryImplementor );
- }
+ private static final String SHARDING_STRATEGY = "sharding_strategy";
+ private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
- //define sharding strategy
- IndexShardingStrategy shardingStrategy;
- //any indexProperty will do, the indexProps[0] surely exists.
- String shardingStrategyName = indexProps[0].getProperty( SHARDING_STRATEGY );
- if ( shardingStrategyName == null) {
- if ( indexProps.length == 1 ) {
- shardingStrategy = new NotShardedStrategy();
- }
- else {
- shardingStrategy = new IdHashShardingStrategy();
- }
- }
- else {
- try {
- Class shardigStrategyClass = ReflectHelper.classForName( shardingStrategyName, this.getClass() );
- shardingStrategy = (IndexShardingStrategy) shardigStrategyClass.newInstance();
- }
- catch (ClassNotFoundException e) {
- throw new SearchException("Unable to find ShardingStrategy class " + shardingStrategyName + " for " + directoryProviderName, e);
- }
- catch (IllegalAccessException e) {
- throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
- + " Be sure to have a no-arg constructor", e);
- }
- catch (InstantiationException e) {
- throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
- + " Be sure to have a no-arg constructor", e);
- }
- catch (ClassCastException e) {
- throw new SearchException("ShardingStrategy class does not implements DirecotryProviderShardingStrategy: "
- + shardingStrategyName, e);
- }
- }
- shardingStrategy.initialize(
- new MaskedProperty( indexProps[0], SHARDING_STRATEGY ), providers );
- return new DirectoryProviders( shardingStrategy, providers );
- }
+ public DirectoryProviders createDirectoryProviders(XClass entity, SearchConfiguration cfg,
+ SearchFactoryImplementor searchFactoryImplementor,
+ ReflectionManager reflectionManager)
+ {
+ //get properties
+ String directoryProviderName = getDirectoryProviderName(entity, cfg);
+ Properties[] indexProps = getDirectoryProperties(cfg, directoryProviderName);
- public void startDirectoryProviders() {
- for ( DirectoryProvider provider : providers ) {
- provider.start();
- }
- }
+ //set up the directories
+ int nbrOfProviders = indexProps.length;
+ DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
+ for (int index = 0; index < nbrOfProviders; index++)
+ {
+ String providerName = nbrOfProviders > 1 ?
+ directoryProviderName + "." + index :
+ directoryProviderName;
+ providers[index] = createDirectoryProvider(providerName, indexProps[index],
+ reflectionManager.toClass(entity), searchFactoryImplementor);
+ }
- private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps,
- Class entity, SearchFactoryImplementor searchFactoryImplementor) {
- String className = indexProps.getProperty( "directory_provider" );
- if ( StringHelper.isEmpty( className ) ) {
- className = DEFAULT_DIRECTORY_PROVIDER;
- }
- DirectoryProvider<?> provider;
- try {
- @SuppressWarnings( "unchecked" )
- Class<DirectoryProvider> directoryClass = ReflectHelper.classForName(
- className, DirectoryProviderFactory.class
- );
- provider = directoryClass.newInstance();
- }
- catch (Exception e) {
- throw new SearchException( "Unable to instantiate directory provider: " + className, e );
- }
- try {
- provider.initialize( directoryProviderName, indexProps, searchFactoryImplementor );
- }
- catch (Exception e) {
- throw new SearchException( "Unable to initialize: " + directoryProviderName, e );
- }
- int index = providers.indexOf( provider );
- if ( index != -1 ) {
- //share the same Directory provider for the same underlying store
- final DirectoryProvider<?> directoryProvider = providers.get( index );
- searchFactoryImplementor.addClassToDirectoryProvider(entity, directoryProvider);
- return directoryProvider;
- }
- else {
- configureOptimizerStrategy( searchFactoryImplementor, indexProps, provider );
- configureIndexingParameters( searchFactoryImplementor, indexProps, provider );
- providers.add( provider );
- searchFactoryImplementor.addClassToDirectoryProvider(entity, provider);
- if ( ! searchFactoryImplementor.getDirectoryProviders().contains( provider ) ) {
- searchFactoryImplementor.addDirectoryProvider( provider );
- }
- return provider;
- }
- }
+ //define sharding strategy
+ IndexShardingStrategy shardingStrategy;
+ //any indexProperty will do, the indexProps[0] surely exists.
+ String shardingStrategyName = indexProps[0].getProperty(SHARDING_STRATEGY);
+ if (shardingStrategyName == null)
+ {
+ if (indexProps.length == 1)
+ {
+ shardingStrategy = new NotShardedStrategy();
+ }
+ else
+ {
+ shardingStrategy = new IdHashShardingStrategy();
+ }
+ }
+ else
+ {
+ try
+ {
+ Class shardigStrategyClass = ReflectHelper.classForName(shardingStrategyName, this.getClass());
+ shardingStrategy = (IndexShardingStrategy) shardigStrategyClass.newInstance();
+ }
+ catch (ClassNotFoundException e)
+ {
+ throw new SearchException("Unable to find ShardingStrategy class " + shardingStrategyName + " for " + directoryProviderName, e);
+ }
+ catch (IllegalAccessException e)
+ {
+ throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
+ + " Be sure to have a no-arg constructor", e);
+ }
+ catch (InstantiationException e)
+ {
+ throw new SearchException("Unable to create instance of ShardingStrategy class " + shardingStrategyName
+ + " Be sure to have a no-arg constructor", e);
+ }
+ catch (ClassCastException e)
+ {
+ throw new SearchException("ShardingStrategy class does not implements DirecotryProviderShardingStrategy: "
+ + shardingStrategyName, e);
+ }
+ }
+ shardingStrategy.initialize(
+ new MaskedProperty(indexProps[0], SHARDING_STRATEGY), providers);
+ return new DirectoryProviders(shardingStrategy, providers);
+ }
- private void configureOptimizerStrategy(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider) {
- boolean incremental = indexProps.containsKey( "optimizer.operation_limit.max" )
- || indexProps.containsKey( "optimizer.transaction_limit.max" );
- OptimizerStrategy optimizerStrategy;
- if (incremental) {
- optimizerStrategy = new IncrementalOptimizerStrategy();
- optimizerStrategy.initialize( provider, indexProps, searchFactoryImplementor );
- }
- else {
- optimizerStrategy = new NoOpOptimizerStrategy();
- }
- searchFactoryImplementor.addOptimizerStrategy( provider, optimizerStrategy );
- }
-
- /**
- * Creates a new <code>LuceneIndexingParameters</code> instance for the specified provider.
- * If there are no matching properties in the configuration default values will be applied.
- * <p>
- * NOTE:</br>
- * If a non batch value is set in the configuration apply it also to the
- * batch mode. This covers the case where users only specify
- * parameters for the non batch mode. In this case the same parameters apply for
- * batch indexing. Parameters are found "depth-first": if a batch parameter is set
- * in a global scope it will take priority on local transaction parameters.
- * </p>
- *
- * @param searchFactoryImplementor the search factory.
- * @param directoryProperties The properties extracted from the configuration.
- * @param provider The directory provider for which to configure the indexing parameters.
- */
- private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor,
- Properties directoryProperties, DirectoryProvider<?> provider) {
- LuceneIndexingParameters indexingParams = new LuceneIndexingParameters( directoryProperties );
- searchFactoryImplementor.addIndexingParameters( provider, indexingParams );
- }
+ public void startDirectoryProviders()
+ {
+ for (DirectoryProvider provider : providers)
+ {
+ provider.start();
+ }
+ }
- /**
- * Returns an array of directory properties
- * Properties are defaulted. For a given property name,
- * hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search.default
- * If the Index is not sharded, a single Properties is returned
- * If the index is sharded, the Properties index matches the shard index
- */
- private static Properties[] getDirectoryProperties(Configuration cfg, String directoryProviderName) {
- Properties rootCfg = new MaskedProperty( cfg.getProperties(), "hibernate.search" );
- Properties globalProperties = new MaskedProperty( rootCfg, "default" );
- Properties directoryLocalProperties = new MaskedProperty( rootCfg, directoryProviderName, globalProperties );
- final String shardsCountValue = directoryLocalProperties.getProperty( NBR_OF_SHARDS );
- if ( shardsCountValue == null ) {
- // no shards: finished.
- return new Properties[] { directoryLocalProperties };
- } else {
- // count shards
- int shardsCount = ConfigurationParseHelper.parseInt( shardsCountValue, shardsCountValue + " is not a number" );
- // create shard-specific Props
- Properties[] shardLocalProperties = new Properties[shardsCount];
- for ( int i = 0; i < shardsCount; i++ ) {
- shardLocalProperties[i] = new MaskedProperty(
- directoryLocalProperties, Integer.toString(i), directoryLocalProperties );
- }
- return shardLocalProperties;
- }
- }
+ private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps,
+ Class entity, SearchFactoryImplementor searchFactoryImplementor)
+ {
+ String className = indexProps.getProperty("directory_provider");
+ if (StringHelper.isEmpty(className))
+ {
+ className = DEFAULT_DIRECTORY_PROVIDER;
+ }
+ DirectoryProvider<?> provider;
+ try
+ {
+ @SuppressWarnings("unchecked")
+ Class<DirectoryProvider> directoryClass = ReflectHelper.classForName(
+ className, DirectoryProviderFactory.class
+ );
+ provider = directoryClass.newInstance();
+ }
+ catch (Exception e)
+ {
+ throw new SearchException("Unable to instantiate directory provider: " + className, e);
+ }
+ try
+ {
+ provider.initialize(directoryProviderName, indexProps, searchFactoryImplementor);
+ }
+ catch (Exception e)
+ {
+ throw new SearchException("Unable to initialize: " + directoryProviderName, e);
+ }
+ int index = providers.indexOf(provider);
+ if (index != -1)
+ {
+ //share the same Directory provider for the same underlying store
+ final DirectoryProvider<?> directoryProvider = providers.get(index);
+ searchFactoryImplementor.addClassToDirectoryProvider(entity, directoryProvider);
+ return directoryProvider;
+ }
+ else
+ {
+ configureOptimizerStrategy(searchFactoryImplementor, indexProps, provider);
+ configureIndexingParameters(searchFactoryImplementor, indexProps, provider);
+ providers.add(provider);
+ searchFactoryImplementor.addClassToDirectoryProvider(entity, provider);
+ if (!searchFactoryImplementor.getDirectoryProviders().contains(provider))
+ {
+ searchFactoryImplementor.addDirectoryProvider(provider);
+ }
+ return provider;
+ }
+ }
- private static String getDirectoryProviderName(XClass clazz, Configuration cfg) {
- //yuk
- ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
- //get the most specialized (ie subclass > superclass) non default index name
- //if none extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
- //FIXME I'm inclined to get rid of the default value
- PersistentClass pc = cfg.getClassMapping( clazz.getName() );
- XClass rootIndex = null;
- do {
- XClass currentClazz = reflectionManager.toXClass( pc.getMappedClass() );
- Indexed indexAnn = currentClazz.getAnnotation( Indexed.class );
- if ( indexAnn != null ) {
- if ( indexAnn.index().length() != 0 ) {
- return indexAnn.index();
- }
- else {
- rootIndex = currentClazz;
- }
- }
- pc = pc.getSuperclass();
- }
- while ( pc != null );
- //there is nobody out there with a non default @Indexed.index
- if ( rootIndex != null ) {
- return rootIndex.getName();
- }
- else {
- throw new SearchException(
- "Trying to extract the index name from a non @Indexed class: " + clazz.getName() );
- }
- }
+ private void configureOptimizerStrategy(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider)
+ {
+ boolean incremental = indexProps.containsKey("optimizer.operation_limit.max")
+ || indexProps.containsKey("optimizer.transaction_limit.max");
+ OptimizerStrategy optimizerStrategy;
+ if (incremental)
+ {
+ optimizerStrategy = new IncrementalOptimizerStrategy();
+ optimizerStrategy.initialize(provider, indexProps, searchFactoryImplementor);
+ }
+ else
+ {
+ optimizerStrategy = new NoOpOptimizerStrategy();
+ }
+ searchFactoryImplementor.addOptimizerStrategy(provider, optimizerStrategy);
+ }
- public static class DirectoryProviders {
- private final IndexShardingStrategy shardingStrategy;
- private final DirectoryProvider[] providers;
+ /**
+ * Creates a new <code>LuceneIndexingParameters</code> instance for the specified provider.
+ * If there are no matching properties in the configuration default values will be applied.
+ * <p>
+ * NOTE:<br/>
+ * If a non batch value is set in the configuration apply it also to the
+ * batch mode. This covers the case where users only specify
+ * parameters for the non batch mode. In this case the same parameters apply for
+ * batch indexing. Parameters are found "depth-first": if a batch parameter is set
+ * in a global scope it will take priority on local transaction parameters.
+ * </p>
+ *
+ * @param searchFactoryImplementor the search factory.
+ * @param directoryProperties The properties extracted from the configuration.
+ * @param provider The directory provider for which to configure the indexing parameters.
+ */
+ private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor,
+ Properties directoryProperties, DirectoryProvider<?> provider)
+ {
+ LuceneIndexingParameters indexingParams = new LuceneIndexingParameters(directoryProperties);
+ searchFactoryImplementor.addIndexingParameters(provider, indexingParams);
+ }
- public DirectoryProviders(IndexShardingStrategy shardingStrategy, DirectoryProvider[] providers) {
- this.shardingStrategy = shardingStrategy;
- this.providers = providers;
- }
+ /**
+ * Returns an array of directory properties
+ * Properties are defaulted. For a given property name,
+ * hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search.default
+ * If the Index is not sharded, a single Properties is returned
+ * If the index is sharded, the Properties index matches the shard index
+ */
+ private static Properties[] getDirectoryProperties(SearchConfiguration cfg, String directoryProviderName)
+ {
+ Properties rootCfg = new MaskedProperty(cfg.getProperties(), "hibernate.search");
+ Properties globalProperties = new MaskedProperty(rootCfg, "default");
+ Properties directoryLocalProperties = new MaskedProperty(rootCfg, directoryProviderName, globalProperties);
+ final String shardsCountValue = directoryLocalProperties.getProperty(NBR_OF_SHARDS);
+ if (shardsCountValue == null)
+ {
+ // no shards: finished.
+ return new Properties[]{directoryLocalProperties};
+ }
+ else
+ {
+ // count shards
+ int shardsCount = ConfigurationParseHelper.parseInt(shardsCountValue, shardsCountValue + " is not a number");
+ // create shard-specific Props
+ Properties[] shardLocalProperties = new Properties[shardsCount];
+ for (int i = 0; i < shardsCount; i++)
+ {
+ shardLocalProperties[i] = new MaskedProperty(
+ directoryLocalProperties, Integer.toString(i), directoryLocalProperties);
+ }
+ return shardLocalProperties;
+ }
+ }
- public IndexShardingStrategy getSelectionStrategy() {
- return shardingStrategy;
- }
+ private static String getDirectoryProviderName(XClass clazz, SearchConfiguration cfg)
+ {
+ //yuk
+ ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
+ //get the most specialized (ie subclass > superclass) non default index name
+ //if none extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
+ //FIXME I'm inclined to get rid of the default value
+ Class aClass = cfg.getClassMapping(clazz.getName());
+ XClass rootIndex = null;
+ do
+ {
+ XClass currentClazz = reflectionManager.toXClass(aClass);
+ Indexed indexAnn = currentClazz.getAnnotation(Indexed.class);
+ if (indexAnn != null)
+ {
+ if (indexAnn.index().length() != 0)
+ {
+ return indexAnn.index();
+ }
+ else
+ {
+ rootIndex = currentClazz;
+ }
+ }
+ aClass = aClass.getSuperclass();
+ }
+ while (aClass != null);
+ //there is nobody out there with a non default @Indexed.index
+ if (rootIndex != null)
+ {
+ return rootIndex.getName();
+ }
+ else
+ {
+ throw new SearchException(
+ "Trying to extract the index name from a non @Indexed class: " + clazz.getName());
+ }
+ }
- public DirectoryProvider[] getProviders() {
- return providers;
+ public static class DirectoryProviders
+ {
+ private final IndexShardingStrategy shardingStrategy;
+ private final DirectoryProvider[] providers;
+
+ public DirectoryProviders(IndexShardingStrategy shardingStrategy, DirectoryProvider[] providers)
+ {
+ this.shardingStrategy = shardingStrategy;
+ this.providers = providers;
+ }
+
+ public IndexShardingStrategy getSelectionStrategy()
+ {
+ return shardingStrategy;
+ }
+
+ public DirectoryProvider[] getProviders()
+ {
+ return providers;
}
}
Added: search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/transaction/EventSourceTransactionContext.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -0,0 +1,36 @@
+package org.hibernate.search.transaction;
+
+import org.hibernate.Transaction;
+import org.hibernate.event.EventSource;
+
+import javax.transaction.Synchronization;
+
+/**
+ * @author Navin Surtani - navin@surtani.org
+ */
+public class EventSourceTransactionContext implements TransactionContext
+{
+ EventSource eventSource;
+
+ public EventSourceTransactionContext(EventSource eventSource)
+ {
+ this.eventSource = eventSource;
+ }
+
+ public Object getTransactionIdentifier()
+ {
+ return eventSource.getTransaction();
+ }
+
+ public void registerSynchronization(Synchronization synchronization)
+ {
+ Transaction transaction = eventSource.getTransaction();
+ transaction.registerSynchronization(synchronization);
+ }
+
+ public boolean isTxInProgress()
+ {
+ return eventSource.isTransactionInProgress();
+ }
+
+}
Added: search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/transaction/TransactionContext.java 2008-07-15 08:47:20 UTC (rev 14928)
@@ -0,0 +1,22 @@
+package org.hibernate.search.transaction;
+
+import javax.transaction.Synchronization;
+
+/**
+ * @author Navin Surtani - navin@surtani.org
+ */
+public interface TransactionContext
+{
+ /**
+ * @return {@code true} if a transaction is in progress, {@code false} otherwise.
+ */
+ public boolean isTxInProgress();
+
+ /**
+ *
+ * @return a transaction object.
+ */
+ public Object getTransactionIdentifier();
+
+ public void registerSynchronization(Synchronization synchronization);
+}
More information about the hibernate-commits
mailing list