Author: hardy.ferentschik
Date: 2008-11-23 05:48:59 -0500 (Sun, 23 Nov 2008)
New Revision: 15605
Added:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
search/trunk/src/java/org/hibernate/search/util/ReflectionHelper.java
Removed:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/src/java/org/hibernate/search/util/BinderHelper.java
Modified:
search/trunk/src/java/org/hibernate/search/backend/Workspace.java
search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
search/trunk/src/java/org/hibernate/search/engine/MultiClassesQueryLoader.java
search/trunk/src/java/org/hibernate/search/engine/ObjectLoaderHelper.java
search/trunk/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
search/trunk/src/test/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java
Log:
HSEARCH-285
Split DocumentBuilder into DocumentBuilderContainedEntity and
DocumentBuilderIndexedEntity.
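In rough terms, indexing work is now dispatched to one of two builders: DocumentBuilderIndexedEntity for @Indexed entities and DocumentBuilderContainedEntity for entities reachable only through @ContainedIn. The following is a minimal, self-contained sketch of that dispatch order; the interfaces are simplified stand-ins, not the actual Hibernate Search types (see the BatchedQueueingProcessor hunk below for the real code):

import java.util.List;

public class BuilderDispatchSketch {

	// Simplified stand-in for DocumentBuilderIndexedEntity (handles @Indexed entities).
	interface IndexedEntityBuilder<T> {
		void addWorkToQueue(Class<T> entityClass, T entity, List<Object> luceneQueue);
	}

	// Simplified stand-in for DocumentBuilderContainedEntity (handles @ContainedIn-only entities).
	interface ContainedEntityBuilder<T> {
		void addWorkToQueue(Class<T> entityClass, T entity, List<Object> luceneQueue);
	}

	// Simplified stand-in for the two lookup methods this change adds to SearchFactoryImplementor.
	interface Factory {
		<T> IndexedEntityBuilder<T> getDocumentBuilderIndexedEntity(Class<T> entityClass);
		<T> ContainedEntityBuilder<T> getDocumentBuilderContainedEntity(Class<T> entityClass);
	}

	// Mirrors the new BatchedQueueingProcessor logic: try the indexed-entity builder first,
	// fall back to the contained-in builder, and skip entities that are neither.
	static <T> void dispatch(Factory factory, Class<T> entityClass, T entity, List<Object> luceneQueue) {
		IndexedEntityBuilder<T> indexed = factory.getDocumentBuilderIndexedEntity( entityClass );
		if ( indexed != null ) {
			indexed.addWorkToQueue( entityClass, entity, luceneQueue );
			return;
		}
		ContainedEntityBuilder<T> contained = factory.getDocumentBuilderContainedEntity( entityClass );
		if ( contained != null ) {
			contained.addWorkToQueue( entityClass, entity, luceneQueue );
		}
	}
}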
Modified: search/trunk/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -16,7 +16,7 @@
import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.search.SearchException;
import org.hibernate.search.SearchFactory;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.optimization.OptimizerStrategy;
@@ -77,8 +77,8 @@
this.lock = searchFactoryImplementor.getDirectoryProviderLock( provider );
}
- public <T> DocumentBuilder<T> getDocumentBuilder(Class<T> entity) {
- return searchFactoryImplementor.getDocumentBuilder( entity );
+ public <T> DocumentBuilderIndexedEntity<T> getDocumentBuilder(Class<T> entity) {
+ return searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity );
}
/**
@@ -206,8 +206,6 @@
}
}
-
-
/**
* Closes a previously opened IndexWriter.
* @throws SearchException on IOException during Lucene close operation.
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -25,13 +25,14 @@
import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
import org.hibernate.search.backend.impl.jms.JMSBackendQueueProcessorFactory;
import org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.engine.DocumentBuilderContainedEntity;
import org.hibernate.search.util.LoggerFactory;
/**
- * Batch work until #performWorks is called.
- * The work is then executed synchronously or asynchronously
+ * Batch work until {@link #performWorks} is called.
+ * The work is then executed synchronously or asynchronously.
*
* @author Emmanuel Bernard
*/
@@ -148,17 +149,21 @@
Class<T> entityClass = work.getEntityClass() != null ?
work.getEntityClass() :
Hibernate.getClass( work.getEntity() );
- DocumentBuilder<T> builder = searchFactoryImplementor.getDocumentBuilder( entityClass );
- if ( builder == null ) {
- //might be a entity contained in
- builder = searchFactoryImplementor.getContainedInOnlyBuilder( entityClass );
- }
- if ( builder == null ) {
+ DocumentBuilderIndexedEntity<T> entityBuilder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityClass );
+ if ( entityBuilder != null ) {
+ entityBuilder.addWorkToQueue(
+ entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
+ );
return;
}
- builder.addWorkToQueue(
- entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
- );
+
+ //might be a entity contained in
+ DocumentBuilderContainedEntity<T> containedInBuilder = searchFactoryImplementor.getDocumentBuilderContainedEntity( entityClass );
+ if ( containedInBuilder != null ) {
+ containedInBuilder.addWorkToQueue(
+ entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
+ );
+ }
}
//TODO implements parallel batchWorkers (one per Directory)
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -3,6 +3,8 @@
import java.util.Properties;
+import javax.transaction.Synchronization;
+
import org.hibernate.search.backend.QueueingProcessor;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkQueue;
@@ -22,16 +24,18 @@
*/
public class TransactionalWorker implements Worker {
//not a synchronized map since for a given transaction, we have not concurrent access
- protected final WeakIdentityHashMap synchronizationPerTransaction = new WeakIdentityHashMap();
+ protected final WeakIdentityHashMap<Object, Synchronization> synchronizationPerTransaction = new WeakIdentityHashMap<Object, Synchronization>();
private QueueingProcessor queueingProcessor;
public void performWork(Work work, TransactionContext transactionContext) {
if ( transactionContext.isTransactionInProgress() ) {
Object transaction = transactionContext.getTransactionIdentifier();
- PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
+ PostTransactionWorkQueueSynchronization txSync = ( PostTransactionWorkQueueSynchronization )
synchronizationPerTransaction.get( transaction );
if ( txSync == null || txSync.isConsumed() ) {
- txSync = new PostTransactionWorkQueueSynchronization( queueingProcessor, synchronizationPerTransaction );
+ txSync = new PostTransactionWorkQueueSynchronization(
+ queueingProcessor, synchronizationPerTransaction
+ );
transactionContext.registerSynchronization( txSync );
synchronizationPerTransaction.put( transaction, txSync );
}
@@ -53,11 +57,10 @@
queueingProcessor.close();
}
-
public void flushWorks(TransactionContext transactionContext) {
if ( transactionContext.isTransactionInProgress() ) {
Object transaction = transactionContext.getTransactionIdentifier();
- PostTransactionWorkQueueSynchronization txSync = (PostTransactionWorkQueueSynchronization)
+ PostTransactionWorkQueueSynchronization txSync = ( PostTransactionWorkQueueSynchronization )
synchronizationPerTransaction.get( transaction );
if ( txSync != null && !txSync.isConsumed() ) {
txSync.flushWorks();
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -6,7 +6,7 @@
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.IndexShardingStrategy;
@@ -39,7 +39,7 @@
// divide tasks in parts, adding to QueueProcessors by affected Directory.
for ( LuceneWork work : queue ) {
final Class<?> entityType = work.getEntityClass();
- DocumentBuilder<?> documentBuilder = searchFactoryImplementor.getDocumentBuilder( entityType );
+ DocumentBuilderIndexedEntity<?> documentBuilder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityType );
IndexShardingStrategy shardingStrategy = documentBuilder.getDirectoryProviderSelectionStrategy();
work.getWorkDelegate( providerSelectionVisitor ).addAsPayLoadsToQueue( work, shardingStrategy, processors );
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -12,7 +12,7 @@
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
/**
@@ -40,7 +40,7 @@
}
public void performWork(LuceneWork work, IndexWriter writer) {
- DocumentBuilder documentBuilder = workspace.getDocumentBuilder( work.getEntityClass() );
+ DocumentBuilderIndexedEntity documentBuilder = workspace.getDocumentBuilder( work.getEntityClass() );
Analyzer analyzer = documentBuilder.getAnalyzer();
Similarity similarity = documentBuilder.getSimilarity();
if ( log.isTraceEnabled() ) {
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -10,7 +10,7 @@
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
import org.slf4j.Logger;
@@ -26,7 +26,7 @@
public class DeleteExtWorkDelegate extends DeleteWorkDelegate {
private final Class managedType;
- private final DocumentBuilder builder;
+ private final DocumentBuilderIndexedEntity builder;
private final Logger log = LoggerFactory.make();
DeleteExtWorkDelegate(Workspace workspace) {
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -15,7 +15,7 @@
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
/**
@@ -44,14 +44,14 @@
public void performWork(LuceneWork work, IndexWriter writer) {
final Class<?> entityType = work.getEntityClass();
log.trace( "Removing {}#{} by query.", entityType, work.getId() );
- DocumentBuilder<?> builder = workspace.getDocumentBuilder( entityType );
+ DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder( entityType );
BooleanQuery entityDeletionQuery = new BooleanQuery();
TermQuery idQueryTerm = new TermQuery( builder.getTerm( work.getId() ) );
entityDeletionQuery.add( idQueryTerm, BooleanClause.Occur.MUST );
- Term classNameQueryTerm = new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() );
+ Term classNameQueryTerm = new Term( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, entityType.getName() );
TermQuery classNameQuery = new TermQuery( classNameQueryTerm );
entityDeletionQuery.add( classNameQuery, BooleanClause.Occur.MUST );
@@ -78,7 +78,7 @@
*/
final Class<?> entityType = work.getEntityClass();
log.trace( "Removing {}#{} from Lucene index.", entityType, work.getId() );
- DocumentBuilder<?> builder = workspace.getDocumentBuilder( entityType );
+ DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder( entityType );
Term term = builder.getTerm( work.getId() );
TermDocs termDocs = null;
try {
@@ -88,7 +88,7 @@
String entityName = entityType.getName();
while ( termDocs.next() ) {
int docIndex = termDocs.doc();
- if ( entityName.equals( reader.document( docIndex ).get( DocumentBuilder.CLASS_FIELDNAME ) ) ) {
+ if ( entityName.equals( reader.document( docIndex ).get( DocumentBuilderIndexedEntity.CLASS_FIELDNAME ) ) ) {
//remove only the one of the right class
//loop all to remove all the matches (defensive code)
reader.deleteDocument( docIndex );
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -8,7 +8,7 @@
import org.hibernate.search.SearchException;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
/**
@@ -34,7 +34,7 @@
public void performWork(LuceneWork work, IndexWriter writer) {
log.trace( "purgeAll Lucene index using IndexWriter for type: {}",
work.getEntityClass() );
try {
- Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, work.getEntityClass().getName() );
+ Term term = new Term( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, work.getEntityClass().getName() );
writer.deleteDocuments( term );
}
catch (Exception e) {
@@ -45,7 +45,7 @@
public void performWork(LuceneWork work, IndexReader reader) {
log.trace( "purgeAll Lucene index using IndexReader for type: {}",
work.getEntityClass() );
try {
- Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, work.getEntityClass().getName() );
+ Term term = new Term( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, work.getEntityClass().getName() );
reader.deleteDocuments( term );
}
catch (Exception e) {
Deleted: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -1,1062 +0,0 @@
-//$Id$
-package org.hibernate.search.engine;
-
-import java.io.Serializable;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Similarity;
-import org.slf4j.Logger;
-
-import org.hibernate.Hibernate;
-import org.hibernate.annotations.common.AssertionFailure;
-import org.hibernate.annotations.common.reflection.ReflectionManager;
-import org.hibernate.annotations.common.reflection.XAnnotatedElement;
-import org.hibernate.annotations.common.reflection.XClass;
-import org.hibernate.annotations.common.reflection.XMember;
-import org.hibernate.annotations.common.reflection.XProperty;
-import org.hibernate.annotations.common.util.ReflectHelper;
-import org.hibernate.annotations.common.util.StringHelper;
-import org.hibernate.proxy.HibernateProxy;
-import org.hibernate.search.SearchException;
-import org.hibernate.search.annotations.AnalyzerDef;
-import org.hibernate.search.annotations.AnalyzerDefs;
-import org.hibernate.search.annotations.Boost;
-import org.hibernate.search.annotations.ClassBridge;
-import org.hibernate.search.annotations.ClassBridges;
-import org.hibernate.search.annotations.ContainedIn;
-import org.hibernate.search.annotations.DocumentId;
-import org.hibernate.search.annotations.Index;
-import org.hibernate.search.annotations.IndexedEmbedded;
-import org.hibernate.search.annotations.ProvidedId;
-import org.hibernate.search.annotations.Store;
-import org.hibernate.search.annotations.TermVector;
-import org.hibernate.search.backend.AddLuceneWork;
-import org.hibernate.search.backend.DeleteLuceneWork;
-import org.hibernate.search.backend.LuceneWork;
-import org.hibernate.search.backend.PurgeAllLuceneWork;
-import org.hibernate.search.backend.WorkType;
-import org.hibernate.search.bridge.BridgeFactory;
-import org.hibernate.search.bridge.FieldBridge;
-import org.hibernate.search.bridge.LuceneOptions;
-import org.hibernate.search.bridge.TwoWayFieldBridge;
-import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
-import org.hibernate.search.bridge.TwoWayStringBridge;
-import org.hibernate.search.impl.InitContext;
-import org.hibernate.search.store.DirectoryProvider;
-import org.hibernate.search.store.IndexShardingStrategy;
-import org.hibernate.search.util.BinderHelper;
-import org.hibernate.search.util.LoggerFactory;
-import org.hibernate.search.util.ScopedAnalyzer;
-
-/**
- * Set up and provide a manager for indexed classes.
- *
- * @author Gavin King
- * @author Emmanuel Bernard
- * @author Sylvain Vieujot
- * @author Richard Hallier
- * @author Hardy Ferentschik
- */
-public class DocumentBuilder<T> {
- private static final Logger log = LoggerFactory.make();
-
- private final PropertiesMetadata rootPropertiesMetadata = new PropertiesMetadata();
- private final XClass beanClass;
- private final DirectoryProvider[] directoryProviders;
- private final IndexShardingStrategy shardingStrategy;
- private String idKeywordName;
-
- /**
- * Flag indicating whether <code>@DocumentId</code> was explicitly
specified.
- */
- private boolean explicitDocumentId = false;
-
- /**
- * Flag indicating whether {@link org.apache.lucene.search.Searcher#doc(int,
org.apache.lucene.document.FieldSelector)}
- * can be used in order to retrieve documents. This is only safe to do if we know that
- * all involved bridges are implementing <code>TwoWayStringBridge</code>.
See HSEARCH-213.
- */
- private boolean allowFieldSelectionInProjection = false;
-
- private XMember idGetter;
- private Float idBoost;
- public static final String CLASS_FIELDNAME = "_hibernate_class";
- private TwoWayFieldBridge idBridge;
- private Set<Class<?>> mappedSubclasses = new
HashSet<Class<?>>();
- private ReflectionManager reflectionManager; //available only during initializationa nd
post-initialization
- private int level = 0;
- private int maxLevel = Integer.MAX_VALUE;
- private final ScopedAnalyzer analyzer = new ScopedAnalyzer();
- private Similarity similarity;
- private boolean isRoot;
- //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- private boolean safeFromTupleId;
- private boolean idProvided = false;
- private EntityState entityState;
-
- /**
- * Constructor used on an @Indexed entity.
- */
- public DocumentBuilder(XClass clazz, InitContext context, DirectoryProvider[]
directoryProviders,
- IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
- this.entityState = EntityState.INDEXED;
- this.beanClass = clazz;
- this.directoryProviders = directoryProviders;
- this.shardingStrategy = shardingStrategy;
- //set to null after post-initialization
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
-
- init( clazz, context, reflectionManager );
- }
-
- /**
- * Constructor used on a non @Indexed entity.
- */
- public DocumentBuilder(XClass clazz, InitContext context, ReflectionManager
reflectionManager) {
- this.entityState = EntityState.CONTAINED_IN_ONLY;
- this.beanClass = clazz;
- this.directoryProviders = null;
- this.shardingStrategy = null;
-
-
- this.reflectionManager = reflectionManager;
- this.similarity = context.getDefaultSimilarity();
-
- init( clazz, context, reflectionManager );
- if ( rootPropertiesMetadata.containedInGetters.size() == 0 ) {
- this.entityState = EntityState.NON_INDEXABLE;
- }
- }
-
- private void init(XClass clazz, InitContext context, ReflectionManager
reflectionManager) {
- if ( clazz == null ) throw new AssertionFailure( "Unable to build a
DocumentBuilder with a null class" );
- rootPropertiesMetadata.boost = getBoost( clazz );
- rootPropertiesMetadata.analyzer = context.getDefaultAnalyzer();
- Set<XClass> processedClasses = new HashSet<XClass>();
- processedClasses.add( clazz );
- initializeMembers( clazz, rootPropertiesMetadata, true, "", processedClasses,
context );
- //processedClasses.remove( clazz ); for the sake of completness
- this.analyzer.setGlobalAnalyzer( rootPropertiesMetadata.analyzer );
- if ( entityState == EntityState.INDEXED && idKeywordName == null ) {
- // if no DocumentId then check if we have a ProvidedId instead
- ProvidedId provided = findProvidedId( clazz, reflectionManager );
- if ( provided == null ) throw new SearchException( "No document id in: " +
clazz.getName() );
-
- idBridge = BridgeFactory.extractTwoWayType( provided.bridge() );
- idKeywordName = provided.name();
- }
-
- //if composite id, use of (a, b) in ((1,2)TwoWayString2FieldBridgeAdaptor, (3,4)) fails
on most database
- //a TwoWayString2FieldBridgeAdaptor is never a composite id
- safeFromTupleId = entityState != EntityState.INDEXED ||
TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom( idBridge.getClass() );
- checkAllowFieldSelection();
- if ( log.isDebugEnabled() ) {
- log.debug( "Field selection in projections is set to {} for entity {}.",
allowFieldSelectionInProjection, clazz.getName() );
- }
- }
-
- /**
- * Checks whether all involved bridges are two way string bridges. If so we can optimize
document retrieval
- * by using <code>FieldSelector</code>. See HSEARCH-213.
- */
- private void checkAllowFieldSelection() {
- allowFieldSelectionInProjection = true;
- if ( ! (idBridge instanceof TwoWayStringBridge || idBridge instanceof
TwoWayString2FieldBridgeAdaptor) ) {
- allowFieldSelectionInProjection = false;
- return;
- }
- for ( FieldBridge bridge : rootPropertiesMetadata.fieldBridges) {
- if ( !( bridge instanceof TwoWayStringBridge || bridge instanceof
TwoWayString2FieldBridgeAdaptor ) ) {
- allowFieldSelectionInProjection = false;
- return;
- }
- }
- }
-
- public boolean isRoot() {
- return isRoot;
- }
-
- public boolean allowFieldSelectionInProjection() {
- return allowFieldSelectionInProjection;
- }
-
- private ProvidedId findProvidedId(XClass clazz, ReflectionManager reflectionManager) {
- ProvidedId id = null;
- XClass currentClass = clazz;
- while ( id == null && ( !reflectionManager.equals( currentClass, Object.class )
) ) {
- id = currentClass.getAnnotation( ProvidedId.class );
- currentClass = clazz.getSuperclass();
- }
- return id;
- }
-
- private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
- org.hibernate.search.annotations.Analyzer analyzerAnn =
- annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
- return getAnalyzer( analyzerAnn, context );
- }
-
- private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn,
InitContext context) {
- Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
- if ( analyzerClass == void.class ) {
- String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
- if ( StringHelper.isEmpty( definition ) ) {
- return null;
- }
- else {
-
- return context.buildLazyAnalyzer( definition );
- }
- }
- else {
- try {
- return (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new SearchException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() +
": " + analyzerClass.getName(), e
- );
- }
- catch (Exception e) {
- throw new SearchException( "Failed to instantiate lucene analyzer with type
" + analyzerClass.getName(), e );
- }
- }
- }
-
- private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata,
boolean isRoot, String prefix,
- Set<XClass> processedClasses, InitContext context) {
- List<XClass> hierarchy = new ArrayList<XClass>();
- for (XClass currClass = clazz; currClass != null; currClass =
currClass.getSuperclass()) {
- hierarchy.add( currClass );
- }
- Class similarityClass = null;
- for (int index = hierarchy.size() - 1; index >= 0; index--) {
- XClass currClass = hierarchy.get( index );
- /**
- * Override the default analyzer for the properties if the class hold one
- * That's the reason we go down the hierarchy
- */
- Analyzer analyzer = getAnalyzer( currClass, context );
-
- if ( analyzer != null ) {
- propertiesMetadata.analyzer = analyzer;
- }
- checkForAnalyzerDefs( currClass, context );
- // Check for any ClassBridges annotation.
- ClassBridges classBridgesAnn = currClass.getAnnotation( ClassBridges.class );
- if ( classBridgesAnn != null ) {
- ClassBridge[] cbs = classBridgesAnn.value();
- for (ClassBridge cb : cbs) {
- bindClassAnnotation( prefix, propertiesMetadata, cb, context );
- }
- }
-
- // Check for any ClassBridge style of annotations.
- ClassBridge classBridgeAnn = currClass.getAnnotation( ClassBridge.class );
- if ( classBridgeAnn != null ) {
- bindClassAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
- }
-
- //Get similarity
- //TODO: similarity form @IndexedEmbedded are not taken care of. Exception??
- if ( isRoot ) {
- org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation(
org.hibernate.search.annotations.Similarity.class );
- if ( similarityAnn != null ) {
- if ( similarityClass != null ) {
- throw new SearchException( "Multiple Similarities defined in the same class
hierarchy: " + beanClass.getName() );
- }
- similarityClass = similarityAnn.impl();
- }
- }
-
- //rejecting non properties (ie regular methods) because the object is loaded from
Hibernate,
- // so indexing a non property does not make sense
- List<XProperty> methods = currClass.getDeclaredProperties(
XClass.ACCESS_PROPERTY );
- for (XProperty method : methods) {
- initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses,
context );
- }
-
- List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD
);
- for (XProperty field : fields) {
- initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses,
context );
- }
- }
- if ( isRoot && similarityClass != null ) {
- try {
- similarity = (Similarity) similarityClass.newInstance();
- }
- catch (Exception e) {
- log.error( "Exception attempting to instantiate Similarity '{}' set for
{}",
- similarityClass.getName(), beanClass.getName() );
- }
- }
- }
-
- private void checkForAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext
context) {
- AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
- if ( defs != null ) {
- for (AnalyzerDef def : defs.value()) {
- context.addAnalyzerDef( def );
- }
- }
- AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
- context.addAnalyzerDef( def );
- }
-
- public String getIdentifierName() {
- return idGetter.getName();
- }
-
- public Similarity getSimilarity() {
- return similarity;
- }
-
- private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata,
boolean isRoot,
- String prefix, Set<XClass> processedClasses, InitContext context) {
-
- checkDocumentId( member, propertiesMetadata, isRoot, prefix, context );
- checkForField( member, propertiesMetadata, prefix, context );
- checkForFields( member, propertiesMetadata, prefix, context );
- checkForAnalyzerDefs( member, context );
- checkForIndexedEmbedded( member, propertiesMetadata, prefix, processedClasses, context
);
- checkForConstraintIn( member, propertiesMetadata );
- }
-
- private void checkForFields(XProperty member, PropertiesMetadata propertiesMetadata,
String prefix, InitContext context) {
- org.hibernate.search.annotations.Fields fieldsAnn =
- member.getAnnotation( org.hibernate.search.annotations.Fields.class );
- if ( fieldsAnn != null ) {
- for (org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value()) {
- bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
- }
- }
- }
-
- private void checkForField(XProperty member, PropertiesMetadata propertiesMetadata,
String prefix, InitContext context) {
- org.hibernate.search.annotations.Field fieldAnn =
- member.getAnnotation( org.hibernate.search.annotations.Field.class );
- if ( fieldAnn != null ) {
- bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
- }
- }
-
- private void checkForConstraintIn(XProperty member, PropertiesMetadata
propertiesMetadata) {
- ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
- if ( containedAnn != null ) {
- setAccessible( member );
- propertiesMetadata.containedInGetters.add( member );
- }
- }
-
- private void checkForIndexedEmbedded(XProperty member, PropertiesMetadata
propertiesMetadata, String prefix, Set<XClass> processedClasses, InitContext
context) {
- IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
- if ( embeddedAnn != null ) {
- int oldMaxLevel = maxLevel;
- int potentialLevel = embeddedAnn.depth() + level;
- if ( potentialLevel < 0 ) {
- potentialLevel = Integer.MAX_VALUE;
- }
- maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
- level++;
-
- XClass elementClass;
- if ( void.class == embeddedAnn.targetElement() ) {
- elementClass = member.getElementClass();
- }
- else {
- elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
- }
- if ( maxLevel == Integer.MAX_VALUE //infinite
- && processedClasses.contains( elementClass ) ) {
- throw new SearchException(
- "Circular reference. Duplicate use of "
- + elementClass.getName()
- + " in root entity " + beanClass.getName()
- + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
- );
- }
- if ( level <= maxLevel ) {
- processedClasses.add( elementClass ); //push
-
- setAccessible( member );
- propertiesMetadata.embeddedGetters.add( member );
- PropertiesMetadata metadata = new PropertiesMetadata();
- propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
- metadata.boost = getBoost( member, null );
- //property > entity analyzer
- Analyzer analyzer = getAnalyzer( member, context );
- metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
- String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
- initializeMembers( elementClass, metadata, false, localPrefix, processedClasses,
context );
- /**
- * We will only index the "expected" type but that's OK, HQL cannot do
downcasting either
- */
- if ( member.isArray() ) {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
- }
- else if ( member.isCollection() ) {
- if ( Map.class.equals( member.getCollectionClass() ) ) {
- //hum subclasses etc etc??
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
- }
- else {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION
);
- }
- }
- else {
- propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
- }
-
- processedClasses.remove( elementClass ); //pop
- }
- else if ( log.isTraceEnabled() ) {
- String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
- log.trace( "depth reached, ignoring {}", localPrefix );
- }
-
- level--;
- maxLevel = oldMaxLevel; //set back the the old max level
- }
- }
-
- private void checkDocumentId(XProperty member, PropertiesMetadata propertiesMetadata,
boolean isRoot, String prefix, InitContext context) {
- Annotation idAnnotation = getIdAnnotation( member, context );
- if ( idAnnotation != null ) {
- String attributeName = getIdAttributeName( member, idAnnotation );
- if ( isRoot ) {
- if ( idKeywordName != null && explicitDocumentId ) {
- throw new AssertionFailure( "Two document id assigned: "
- + idKeywordName + " and " + attributeName );
- }
- idKeywordName = prefix + attributeName;
- FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager
);
- if ( fieldBridge instanceof TwoWayFieldBridge ) {
- idBridge = (TwoWayFieldBridge) fieldBridge;
- }
- else {
- throw new SearchException(
- "Bridge for document id does not implement TwoWayFieldBridge: " +
member.getName() );
- }
- idBoost = getBoost( member, null );
- setAccessible( member );
- idGetter = member;
- }
- else {
- //component should index their document id
- setAccessible( member );
- propertiesMetadata.fieldGetters.add( member );
- String fieldName = prefix + attributeName;
- propertiesMetadata.fieldNames.add( fieldName );
- propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
- propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
- propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
- propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member,
reflectionManager ) );
- propertiesMetadata.fieldBoosts.add( getBoost( member, null ) );
- // property > entity analyzer (no field analyzer)
- Analyzer analyzer = getAnalyzer( member, context );
- if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
- if ( analyzer == null ) throw new AssertionFailure( "Analizer should not be
undefined" );
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
- }
- }
-
- /**
- * Checks whether the specified property contains an annotation used as document id.
- * This can either be an explicit <code>@DocumentId</code> or if no
<code>@DocumentId</code> is specified a
- * JPA <code>@Id</code> annotation. The check for the JPA annotation is
indirectly to avoid a hard dependency
- * to Hibernate Annotations.
- *
- * @param member the property to check for the id annotation.
- * @return the annotation used as document id or <code>null</code> if id
annotation is specified on the property.
- */
- private Annotation getIdAnnotation(XProperty member, InitContext context) {
- // check for explicit DocumentId
- Annotation documentIdAnn = member.getAnnotation( DocumentId.class );
- if ( documentIdAnn != null ) {
- explicitDocumentId = true;
- return documentIdAnn;
- }
-
- // check for JPA @Id
- if ( !explicitDocumentId && context.isJpaPresent() ) {
- Class idClass;
- try {
- idClass = org.hibernate.util.ReflectHelper.classForName(
"javax.persistence.Id", InitContext.class );
- } catch ( ClassNotFoundException e ) {
- throw new SearchException( "Unable to load @Id.class even though it should be
present ?!" );
- }
- documentIdAnn = member.getAnnotation( idClass );
- if ( documentIdAnn != null )
- log.debug( "Found JPA id and using it as document id" );
- }
- return documentIdAnn;
- }
-
- /**
- * Determines the property name for the document id. It is either the name of the
property itself or the
- * value of the name attribute of the <code>idAnnotation</code>.
- *
- * @param member the property used as id property.
- * @param idAnnotation the id annotation
- * @return property name to be used as document id.
- */
- private String getIdAttributeName(XProperty member, Annotation idAnnotation) {
- String name = null;
- try {
- Method m = idAnnotation.getClass().getMethod( "name" );
- name = (String) m.invoke( idAnnotation );
- }
- catch ( Exception e ) {
- // ignore
- }
-
- return BinderHelper.getAttributeName( member, name );
- }
-
- private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata,
ClassBridge ann, InitContext context) {
- //FIXME name should be prefixed
- String fieldName = prefix + ann.name();
- propertiesMetadata.classNames.add( fieldName );
- propertiesMetadata.classStores.add( getStore( ann.store() ) );
- propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
- propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
- propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
- propertiesMetadata.classBoosts.add( ann.boost().value() );
-
- Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
- if ( analyzer == null ) analyzer = propertiesMetadata.analyzer;
- if ( analyzer == null ) throw new AssertionFailure( "Analyzer should not be
undefined" );
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
-
- private void bindFieldAnnotation(XProperty member, PropertiesMetadata
propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn,
InitContext context) {
- setAccessible( member );
- propertiesMetadata.fieldGetters.add( member );
- String fieldName = prefix + BinderHelper.getAttributeName( member, fieldAnn.name() );
- propertiesMetadata.fieldNames.add( fieldName );
- propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
- propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
- propertiesMetadata.fieldBoosts.add( getBoost(member, fieldAnn) );
- propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
- propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member,
reflectionManager ) );
-
- // Field > property > entity analyzer
- Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
- if ( analyzer == null ) analyzer = getAnalyzer( member, context );
- if ( analyzer != null ) {
- this.analyzer.addScopedAnalyzer( fieldName, analyzer );
- }
- }
-
- private Float getBoost(XProperty member, org.hibernate.search.annotations.Field
fieldAnn) {
- float computedBoost = 1.0f;
- Boost boostAnn = member.getAnnotation( Boost.class );
- if (boostAnn != null) computedBoost = boostAnn.value();
- if (fieldAnn != null) computedBoost *= fieldAnn.boost().value();
- return computedBoost;
- }
-
- private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty
member) {
- String localPrefix = prefix;
- if ( ".".equals( embeddedAnn.prefix() ) ) {
- //default to property name
- localPrefix += member.getName() + '.';
- }
- else {
- localPrefix += embeddedAnn.prefix();
- }
- return localPrefix;
- }
-
- private Field.Store getStore(Store store) {
- switch ( store ) {
- case NO:
- return Field.Store.NO;
- case YES:
- return Field.Store.YES;
- case COMPRESS:
- return Field.Store.COMPRESS;
- default:
- throw new AssertionFailure( "Unexpected Store: " + store );
- }
- }
-
- private Field.TermVector getTermVector(TermVector vector) {
- switch ( vector ) {
- case NO:
- return Field.TermVector.NO;
- case YES:
- return Field.TermVector.YES;
- case WITH_OFFSETS:
- return Field.TermVector.WITH_OFFSETS;
- case WITH_POSITIONS:
- return Field.TermVector.WITH_POSITIONS;
- case WITH_POSITION_OFFSETS:
- return Field.TermVector.WITH_POSITIONS_OFFSETS;
- default:
- throw new AssertionFailure( "Unexpected TermVector: " + vector );
- }
- }
-
- private Field.Index getIndex(Index index) {
- switch ( index ) {
- case NO:
- return Field.Index.NO;
- case NO_NORMS:
- return Field.Index.NOT_ANALYZED_NO_NORMS;
- case TOKENIZED:
- return Field.Index.ANALYZED;
- case UN_TOKENIZED:
- return Field.Index.NOT_ANALYZED;
- default:
- throw new AssertionFailure( "Unexpected Index: " + index );
- }
- }
-
- private Float getBoost(XClass element) {
- if ( element == null ) return null;
- Boost boost = element.getAnnotation( Boost.class );
- return boost != null ?
- boost.value() :
- null;
- }
-
- private Object getMemberValue(Object bean, XMember getter) {
- Object value;
- try {
- value = getter.invoke( bean );
- }
- catch (Exception e) {
- throw new IllegalStateException( "Could not get property value", e );
- }
- return value;
- }
-
- //TODO could we use T instead of EntityClass?
- public void addWorkToQueue(Class<T> entityClass, T entity, Serializable id,
WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor
searchFactoryImplementor) {
- //TODO with the caller loop we are in a n^2: optimize it using a HashMap for work
recognition
- if ( entityState == EntityState.INDEXED ) {
- List<LuceneWork> toDelete = new ArrayList<LuceneWork>();
- boolean duplicateDelete = false;
- for (LuceneWork luceneWork : queue) {
- //avoid unecessary duplicated work
- if ( luceneWork.getEntityClass() == entityClass
- ) {
- Serializable currentId = luceneWork.getId();
- //currentId != null => either ADD or Delete work
- if ( currentId != null && currentId.equals( id ) ) { //find a way to use
Type.equals(x,y)
- if ( workType == WorkType.DELETE ) { //TODO add PURGE?
- //DELETE should have precedence over any update before (HSEARCH-257)
- //if an Add work is here, remove it
- //if an other delete is here remember but still search for Add
- if ( luceneWork instanceof AddLuceneWork ) {
- toDelete.add( luceneWork );
- }
- else if ( luceneWork instanceof DeleteLuceneWork ) {
- duplicateDelete = true;
- }
- }
- else {
- //we can safely say we are out, the other work is an ADD
- return;
- }
- }
- //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
- }
- }
- for (LuceneWork luceneWork : toDelete) {
- queue.remove( luceneWork );
- }
- if ( duplicateDelete ) return;
-
- String idInString = idBridge.objectToString( id );
- if ( workType == WorkType.ADD ) {
- Document doc = getDocument( entity, id );
- queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
- }
- else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- }
- else if ( workType == WorkType.PURGE_ALL ) {
- queue.add( new PurgeAllLuceneWork( entityClass ) );
- }
- else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
- Document doc = getDocument( entity, id );
- /**
- * even with Lucene 2.1, use of indexWriter to update is not an option
- * We can only delete by term, and the index doesn't have a term that
- * uniquely identify the entry.
- * But essentially the optimization we are doing is the same Lucene is doing, the
only extra cost is the
- * double file opening.
- */
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
- }
- else if ( workType == WorkType.INDEX ) {
- Document doc = getDocument( entity, id );
- queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
- queue.add( new AddLuceneWork( id, idInString, entityClass, doc, true ) );
- }
- else {
- throw new AssertionFailure( "Unknown WorkType: " + workType );
- }
- }
-
- /**
- * When references are changed, either null or another one, we expect dirty checking to
be triggered (both sides
- * have to be updated)
- * When the internal object is changed, we apply the {Add|Update}Work on containedIns
- */
- if ( workType.searchForContainers() ) {
- processContainedIn( entity, queue, rootPropertiesMetadata, searchFactoryImplementor
);
- }
- }
-
- private void processContainedIn(Object instance, List<LuceneWork> queue,
PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
- for (int i = 0; i < metadata.containedInGetters.size(); i++) {
- XMember member = metadata.containedInGetters.get( i );
- Object value = getMemberValue( instance, member );
- if ( value == null ) continue;
-
- if ( member.isArray() ) {
- for (Object arrayValue : (Object[]) value) {
- //highly inneficient but safe wrt the actual targeted class
- Class<?> valueClass = Hibernate.getClass( arrayValue );
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder(
valueClass );
- if ( builder == null ) continue;
- processContainedInValue( arrayValue, queue, valueClass, builder,
searchFactoryImplementor );
- }
- }
- else if ( member.isCollection() ) {
- Collection collection;
- if ( Map.class.equals( member.getCollectionClass() ) ) {
- //hum
- collection = ( (Map) value ).values();
- }
- else {
- collection = (Collection) value;
- }
- for (Object collectionValue : collection) {
- //highly inneficient but safe wrt the actual targeted class
- Class<?> valueClass = Hibernate.getClass( collectionValue );
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder(
valueClass );
- if ( builder == null ) continue;
- processContainedInValue( collectionValue, queue, valueClass, builder,
searchFactoryImplementor );
- }
- }
- else {
- Class<?> valueClass = Hibernate.getClass( value );
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder(
valueClass );
- if ( builder == null ) continue;
- processContainedInValue( value, queue, valueClass, builder, searchFactoryImplementor
);
- }
- }
- //an embedded cannot have a useful @ContainedIn (no shared reference)
- //do not walk through them
- }
-
- private void processContainedInValue(Object value, List<LuceneWork> queue,
Class<?> valueClass,
- DocumentBuilder builder, SearchFactoryImplementor searchFactoryImplementor) {
- Serializable id = (Serializable) builder.getMemberValue( value, builder.idGetter );
- builder.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue,
searchFactoryImplementor );
- }
-
- public Document getDocument(T instance, Serializable id) {
- Document doc = new Document();
- final Class<?> entityType = Hibernate.getClass( instance );
- //XClass instanceClass = reflectionManager.toXClass( entityType );
- if ( rootPropertiesMetadata.boost != null ) {
- doc.setBoost( rootPropertiesMetadata.boost );
- }
- {
- Field classField =
- new Field( CLASS_FIELDNAME, entityType.getName(), Field.Store.YES,
Field.Index.NOT_ANALYZED, Field.TermVector.NO );
- doc.add( classField );
- LuceneOptions luceneOptions = new LuceneOptionsImpl( Field.Store.YES,
- Field.Index.NOT_ANALYZED, Field.TermVector.NO, idBoost );
- idBridge.set( idKeywordName, id, doc, luceneOptions );
- }
- buildDocumentFields( instance, doc, rootPropertiesMetadata );
- return doc;
- }
-
- private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata
propertiesMetadata) {
- if ( instance == null ) return;
- //needed for field access: I cannot work in the proxied version
- Object unproxiedInstance = unproxy( instance );
- for (int i = 0; i < propertiesMetadata.classBridges.size(); i++) {
- FieldBridge fb = propertiesMetadata.classBridges.get( i );
- fb.set( propertiesMetadata.classNames.get( i ), unproxiedInstance,
- doc, propertiesMetadata.getClassLuceneOptions( i ) );
- }
- for (int i = 0; i < propertiesMetadata.fieldNames.size(); i++) {
- XMember member = propertiesMetadata.fieldGetters.get( i );
- Object value = getMemberValue( unproxiedInstance, member );
- propertiesMetadata.fieldBridges.get( i ).set(
- propertiesMetadata.fieldNames.get( i ), value, doc,
- propertiesMetadata.getFieldLuceneOptions( i ) );
- }
- for (int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++) {
- XMember member = propertiesMetadata.embeddedGetters.get( i );
- Object value = getMemberValue( unproxiedInstance, member );
- //TODO handle boost at embedded level: already stored in propertiesMedatada.boost
-
- if ( value == null ) continue;
- PropertiesMetadata embeddedMetadata =
propertiesMetadata.embeddedPropertiesMetadata.get( i );
- switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
- case ARRAY:
- for (Object arrayValue : (Object[]) value) {
- buildDocumentFields( arrayValue, doc, embeddedMetadata );
- }
- break;
- case COLLECTION:
- for (Object collectionValue : (Collection) value) {
- buildDocumentFields( collectionValue, doc, embeddedMetadata );
- }
- break;
- case MAP:
- for (Object collectionValue : ( (Map) value ).values()) {
- buildDocumentFields( collectionValue, doc, embeddedMetadata );
- }
- break;
- case OBJECT:
- buildDocumentFields( value, doc, embeddedMetadata );
- break;
- default:
- throw new AssertionFailure( "Unknown embedded container: "
- + propertiesMetadata.embeddedContainers.get( i ) );
- }
- }
- }
-
- private Object unproxy(Object value) {
- //FIXME this service should be part of Core?
- if ( value instanceof HibernateProxy ) {
- // .getImplementation() initializes the data by side effect
- value = ( (HibernateProxy) value ).getHibernateLazyInitializer()
- .getImplementation();
- }
- return value;
- }
-
- public Term getTerm(Serializable id) {
- if ( idProvided ) {
- return new Term( idKeywordName, (String) id );
- }
-
- return new Term( idKeywordName, idBridge.objectToString( id ) );
- }
-
- public DirectoryProvider[] getDirectoryProviders() {
- if ( entityState != EntityState.INDEXED ) {
- throw new AssertionFailure( "Contained in only entity: getDirectoryProvider
should not have been called." );
- }
- return directoryProviders;
- }
-
- public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
- if ( entityState != EntityState.INDEXED ) {
- throw new AssertionFailure( "Contained in only entity:
getDirectoryProviderSelectionStrategy should not have been called." );
- }
- return shardingStrategy;
- }
-
- public Analyzer getAnalyzer() {
- return analyzer;
- }
-
- private static void setAccessible(XMember member) {
- if ( !Modifier.isPublic( member.getModifiers() ) ) {
- member.setAccessible( true );
- }
- }
-
- public TwoWayFieldBridge getIdBridge() {
- return idBridge;
- }
-
- public String getIdKeywordName() {
- return idKeywordName;
- }
-
- public static Class getDocumentClass(Document document) {
- String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
- try {
- return ReflectHelper.classForName( className );
- }
- catch (ClassNotFoundException e) {
- throw new SearchException( "Unable to load indexed class: " + className, e
);
- }
- }
-
- public static Serializable getDocumentId(SearchFactoryImplementor
searchFactoryImplementor, Class<?> clazz, Document document) {
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder( clazz
);
- if ( builder == null ) throw new SearchException( "No Lucene configuration set up
for: " + clazz.getName() );
- return (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(), document
);
- }
-
- public static Object[] getDocumentFields(SearchFactoryImplementor
searchFactoryImplementor, Class<?> clazz, Document document, String[] fields) {
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder( clazz
);
- if ( builder == null ) throw new SearchException( "No Lucene configuration set up
for: " + clazz.getName() );
- final int fieldNbr = fields.length;
- Object[] result = new Object[fieldNbr];
-
- if ( builder.idKeywordName != null ) {
- populateResult( builder.idKeywordName, builder.idBridge, Field.Store.YES, fields,
result, document );
- }
-
- final PropertiesMetadata metadata = builder.rootPropertiesMetadata;
- processFieldsForProjection( metadata, fields, result, document );
- return result;
- }
-
- private static void processFieldsForProjection(PropertiesMetadata metadata, String[]
fields, Object[] result, Document document) {
- final int nbrFoEntityFields = metadata.fieldNames.size();
- for (int index = 0; index < nbrFoEntityFields; index++) {
- populateResult( metadata.fieldNames.get( index ),
- metadata.fieldBridges.get( index ),
- metadata.fieldStore.get( index ),
- fields,
- result,
- document
- );
- }
- final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
- for (int index = 0; index < nbrOfEmbeddedObjects; index++) {
- //there is nothing we can do for collections
- if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT )
{
- processFieldsForProjection( metadata.embeddedPropertiesMetadata.get( index ), fields,
result, document );
- }
- }
- }
-
- private static void populateResult(String fieldName, FieldBridge fieldBridge,
Field.Store store,
- String[] fields, Object[] result, Document document) {
- int matchingPosition = getFieldPosition( fields, fieldName );
- if ( matchingPosition != -1 ) {
- //TODO make use of an isTwoWay() method
- if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom(
fieldBridge.getClass() ) ) {
- result[matchingPosition] = ( (TwoWayFieldBridge) fieldBridge ).get( fieldName,
document );
- if ( log.isTraceEnabled() ) {
- log.trace( "Field {} projected as {}", fieldName, result[matchingPosition]
);
- }
- }
- else {
- if ( store == Field.Store.NO ) {
- throw new SearchException( "Projecting an unstored field: " + fieldName
);
- }
- else {
- throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " +
fieldBridge.getClass() );
- }
- }
- }
- }
-
- private static int getFieldPosition(String[] fields, String fieldName) {
- int fieldNbr = fields.length;
- for (int index = 0; index < fieldNbr; index++) {
- if ( fieldName.equals( fields[index] ) ) return index;
- }
- return -1;
- }
-
- public void postInitialize(Set<Class<?>> indexedClasses) {
- if ( entityState == EntityState.NON_INDEXABLE )
- throw new AssertionFailure( "A non indexed entity is post processed" );
- //this method does not requires synchronization
- Class plainClass = reflectionManager.toClass( beanClass );
- Set<Class<?>> tempMappedSubclasses = new HashSet<Class<?>>();
- //together with the caller this creates a o(2), but I think it's still faster than
create the up hierarchy for each class
- for (Class currentClass : indexedClasses) {
- if ( plainClass.isAssignableFrom( currentClass ) ) tempMappedSubclasses.add(
currentClass );
- }
- this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
- Class superClass = plainClass.getSuperclass();
- this.isRoot = true;
- while ( superClass != null ) {
- if ( indexedClasses.contains( superClass ) ) {
- this.isRoot = false;
- break;
- }
- superClass = superClass.getSuperclass();
- }
- this.reflectionManager = null;
- }
-
- public EntityState getEntityState() {
- return entityState;
- }
-
- public Set<Class<?>> getMappedSubclasses() {
- return mappedSubclasses;
- }
-
- /**
- * Make sure to return false if there is a risk of composite id
- * if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
- */
- public boolean isSafeFromTupleId() {
- return safeFromTupleId;
- }
-
- /**
- * Wrapper class containing all the meta data extracted out of the entities.
- */
- private static class PropertiesMetadata {
- public Float boost;
- public Analyzer analyzer;
- public final List<String> fieldNames = new ArrayList<String>();
- public final List<XMember> fieldGetters = new ArrayList<XMember>();
- public final List<FieldBridge> fieldBridges = new
ArrayList<FieldBridge>();
- public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
- public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
- public final List<Float> fieldBoosts = new ArrayList<Float>();
- public final List<Field.TermVector> fieldTermVectors = new
ArrayList<Field.TermVector>();
- public final List<XMember> embeddedGetters = new ArrayList<XMember>();
- public final List<PropertiesMetadata> embeddedPropertiesMetadata = new
ArrayList<PropertiesMetadata>();
- public final List<Container> embeddedContainers = new
ArrayList<Container>();
- public final List<XMember> containedInGetters = new ArrayList<XMember>();
- public final List<String> classNames = new ArrayList<String>();
- public final List<Field.Store> classStores = new ArrayList<Field.Store>();
- public final List<Field.Index> classIndexes = new
ArrayList<Field.Index>();
- public final List<FieldBridge> classBridges = new
ArrayList<FieldBridge>();
- public final List<Field.TermVector> classTermVectors = new
ArrayList<Field.TermVector>();
- public final List<Float> classBoosts = new ArrayList<Float>();
-
- public enum Container {
- OBJECT,
- COLLECTION,
- MAP,
- ARRAY
- }
-
- private LuceneOptions getClassLuceneOptions(int i) {
- LuceneOptions options = new LuceneOptionsImpl( classStores.get( i ),
- classIndexes.get( i ), classTermVectors.get( i ), classBoosts.get( i ) );
- return options;
- }
-
- private LuceneOptions getFieldLuceneOptions(int i) {
- LuceneOptions options;
- options = new LuceneOptionsImpl( fieldStore.get( i ),
- fieldIndex.get( i ), fieldTermVectors.get( i ), fieldBoosts.get( i ) );
- return options;
- }
- }
-}
Copied:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
(from rev 15603, search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java)
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
(rev 0)
+++
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -0,0 +1,1011 @@
+//$Id$
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Similarity;
+import org.slf4j.Logger;
+
+import org.hibernate.Hibernate;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XAnnotatedElement;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.annotations.common.reflection.XProperty;
+import org.hibernate.annotations.common.util.ReflectHelper;
+import org.hibernate.annotations.common.util.StringHelper;
+import org.hibernate.proxy.HibernateProxy;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.ClassBridges;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.bridge.BridgeFactory;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.TwoWayFieldBridge;
+import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.impl.InitContext;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.ReflectionHelper;
+import org.hibernate.search.util.ScopedAnalyzer;
+
+/**
+ * Set up and provide a manager for indexed classes.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Richard Hallier
+ * @author Hardy Ferentschik
+ */
+public class DocumentBuilderContainedEntity<T> {
+ private static final Logger log = LoggerFactory.make();
+
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+
+ protected final PropertiesMetadata metadata = new PropertiesMetadata();
+ protected final XClass beanClass;
+ protected String idKeywordName;
+
+ /**
+ * Flag indicating whether <code>@DocumentId</code> was explicitly
specified.
+ */
+ protected boolean explicitDocumentId = false;
+
+ /**
+ * Flag indicating whether {@link org.apache.lucene.search.Searcher#doc(int,
org.apache.lucene.document.FieldSelector)}
+ * can be used in order to retrieve documents. This is only safe to do if we know that
+ * all involved bridges are implementing <code>TwoWayStringBridge</code>.
See HSEARCH-213.
+ */
+ private boolean allowFieldSelectionInProjection = false;
+
+ protected XMember idGetter;
+ protected Float idBoost;
+ protected TwoWayFieldBridge idBridge;
+ protected Set<Class<?>> mappedSubclasses = new
HashSet<Class<?>>();
+ private ReflectionManager reflectionManager; //available only during initialization and post-initialization
+ protected int level = 0;
+ protected int maxLevel = Integer.MAX_VALUE;
+ protected final ScopedAnalyzer analyzer = new ScopedAnalyzer();
+ protected Similarity similarity;
+ protected boolean isRoot;
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most database
+ private boolean safeFromTupleId;
+ protected boolean idProvided = false;
+ protected EntityState entityState;
+
+ /**
+ * Constructor used on contained entities not annotated with
<code>@Indexed</code> themselves.
+ *
+ * @param clazz The class for which to build a
<code>DocumentBuilderContainedEntity</code>.
+ * @param context Handle to default configuration settings.
+ * @param reflectionManager Reflection manager to use for processing the annotations.
+ */
+ public DocumentBuilderContainedEntity(XClass clazz, InitContext context,
ReflectionManager reflectionManager) {
+
+ if ( clazz == null ) {
+ throw new AssertionFailure( "Unable to build a DocumentBuilderContainedEntity
with a null class" );
+ }
+
+ this.entityState = EntityState.CONTAINED_IN_ONLY;
+ this.beanClass = clazz;
+ this.reflectionManager = reflectionManager;
+
+ init( clazz, context );
+
+ if ( this.similarity == null ) {
+ this.similarity = context.getDefaultSimilarity();
+ }
+
+ if ( metadata.containedInGetters.size() == 0 ) {
+ this.entityState = EntityState.NON_INDEXABLE;
+ }
+ }
+
+ private void init(XClass clazz, InitContext context) {
+ metadata.boost = getBoost( clazz );
+ metadata.analyzer = context.getDefaultAnalyzer();
+
+ Set<XClass> processedClasses = new HashSet<XClass>();
+ processedClasses.add( clazz );
+ initializeMembers( clazz, metadata, true, "", processedClasses, context );
+
+ this.analyzer.setGlobalAnalyzer( metadata.analyzer );
+
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most databases
+ //a TwoWayString2FieldBridgeAdaptor is never a composite id
+ safeFromTupleId = entityState != EntityState.INDEXED ||
TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom(
+ idBridge.getClass()
+ );
+
+ checkAllowFieldSelection();
+ if ( log.isDebugEnabled() ) {
+ log.debug(
+ "Field selection in projections is set to {} for entity {}.",
+ allowFieldSelectionInProjection,
+ clazz.getName()
+ );
+ }
+ }
+
+ /**
+ * Checks whether all involved bridges are two way string bridges. If so we can optimize
document retrieval
+ * by using <code>FieldSelector</code>. See HSEARCH-213.
+ */
+ private void checkAllowFieldSelection() {
+ allowFieldSelectionInProjection = true;
+ if ( !( idBridge instanceof TwoWayStringBridge || idBridge instanceof
TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ for ( FieldBridge bridge : metadata.fieldBridges ) {
+ if ( !( bridge instanceof TwoWayStringBridge || bridge instanceof
TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ }
+ }
+
+ public boolean isRoot() {
+ return isRoot;
+ }
+
+ public boolean allowFieldSelectionInProjection() {
+ return allowFieldSelectionInProjection;
+ }
+
+ private Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
+ org.hibernate.search.annotations.Analyzer analyzerAnn =
+ annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
+ return getAnalyzer( analyzerAnn, context );
+ }
+
+ private Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn,
InitContext context) {
+ Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
+ if ( analyzerClass == void.class ) {
+ String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
+ if ( StringHelper.isEmpty( definition ) ) {
+ return null;
+ }
+ else {
+
+ return context.buildLazyAnalyzer( definition );
+ }
+ }
+ else {
+ try {
+ return ( Analyzer ) analyzerClass.newInstance();
+ }
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() +
": " + analyzerClass.getName(),
+ e
+ );
+ }
+ catch ( Exception e ) {
+ throw new SearchException(
+ "Failed to instantiate lucene analyzer with type " +
analyzerClass.getName(), e
+ );
+ }
+ }
+ }
+
+ private void initializeMembers(XClass clazz, PropertiesMetadata propertiesMetadata,
boolean isRoot, String prefix,
+ Set<XClass> processedClasses, InitContext context) {
+ List<XClass> hierarchy = new ArrayList<XClass>();
+ for ( XClass currClass = clazz; currClass != null; currClass =
currClass.getSuperclass() ) {
+ hierarchy.add( currClass );
+ }
+ for ( int index = hierarchy.size() - 1; index >= 0; index-- ) {
+ XClass currClass = hierarchy.get( index );
+ /*
+ * Override the default analyzer for the properties if the class holds one
+ * That's the reason we go down the hierarchy
+ */
+ Analyzer analyzer = getAnalyzer( currClass, context );
+
+ if ( analyzer != null ) {
+ propertiesMetadata.analyzer = analyzer;
+ }
+ checkForAnalyzerDefs( currClass, context );
+
+ // Check for any ClassBridges annotation.
+ ClassBridges classBridgesAnn = currClass.getAnnotation( ClassBridges.class );
+ if ( classBridgesAnn != null ) {
+ ClassBridge[] cbs = classBridgesAnn.value();
+ for ( ClassBridge cb : cbs ) {
+ bindClassAnnotation( prefix, propertiesMetadata, cb, context );
+ }
+ }
+
+ // Check for any ClassBridge style of annotations.
+ ClassBridge classBridgeAnn = currClass.getAnnotation( ClassBridge.class );
+ if ( classBridgeAnn != null ) {
+ bindClassAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
+ }
+
+ //Get similarity
+ //TODO: similarity from @IndexedEmbedded is not taken care of. Exception??
+ if ( isRoot ) {
+ checkForSimilarity( currClass );
+ }
+
+ // rejecting non-properties (i.e. regular methods) because the object is loaded from Hibernate,
+ // so indexing a non-property does not make sense
+ List<XProperty> methods = currClass.getDeclaredProperties(
XClass.ACCESS_PROPERTY );
+ for ( XProperty method : methods ) {
+ initializeMember( method, propertiesMetadata, isRoot, prefix, processedClasses,
context );
+ }
+
+ List<XProperty> fields = currClass.getDeclaredProperties( XClass.ACCESS_FIELD
);
+ for ( XProperty field : fields ) {
+ initializeMember( field, propertiesMetadata, isRoot, prefix, processedClasses,
context );
+ }
+ }
+ }
+
+ private void initializeMember(XProperty member, PropertiesMetadata propertiesMetadata,
boolean isRoot,
+ String prefix, Set<XClass> processedClasses, InitContext context) {
+ checkDocumentId( member, propertiesMetadata, isRoot, prefix, context );
+ checkForField( member, propertiesMetadata, prefix, context );
+ checkForFields( member, propertiesMetadata, prefix, context );
+ checkForAnalyzerDefs( member, context );
+ checkForIndexedEmbedded( member, propertiesMetadata, prefix, processedClasses, context
);
+ checkForContainedIn( member, propertiesMetadata );
+ }
+
+ private void checkForAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext
context) {
+ AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
+ if ( defs != null ) {
+ for ( AnalyzerDef def : defs.value() ) {
+ context.addAnalyzerDef( def );
+ }
+ }
+ AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
+ context.addAnalyzerDef( def );
+ }
+
+ public String getIdentifierName() {
+ return idGetter.getName();
+ }
+
+ public Similarity getSimilarity() {
+ return similarity;
+ }
+
+ private void checkForFields(XProperty member, PropertiesMetadata propertiesMetadata,
String prefix, InitContext context) {
+ org.hibernate.search.annotations.Fields fieldsAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Fields.class );
+ if ( fieldsAnn != null ) {
+ for ( org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value() ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ }
+
+ private void checkForSimilarity(XClass currClass) {
+ org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation(
org.hibernate.search.annotations.Similarity.class );
+ if ( similarityAnn != null ) {
+ if ( similarity != null ) {
+ throw new SearchException(
+ "Multiple Similarities defined in the same class hierarchy: " +
beanClass.getName()
+ );
+ }
+ Class similarityClass = similarityAnn.impl();
+ try {
+ similarity = ( Similarity ) similarityClass.newInstance();
+ }
+ catch ( Exception e ) {
+ log.error(
+ "Exception attempting to instantiate Similarity '{}' set for
{}",
+ similarityClass.getName(), beanClass.getName()
+ );
+ }
+ }
+ }
+
+ private void checkForField(XProperty member, PropertiesMetadata propertiesMetadata,
String prefix, InitContext context) {
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Field.class );
+ if ( fieldAnn != null ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+
+ private void checkForContainedIn(XProperty member, PropertiesMetadata
propertiesMetadata) {
+ ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
+ if ( containedAnn != null ) {
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.containedInGetters.add( member );
+ }
+ }
+
+ private void checkForIndexedEmbedded(XProperty member, PropertiesMetadata
propertiesMetadata, String prefix, Set<XClass> processedClasses, InitContext
context) {
+ IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
+ if ( embeddedAnn != null ) {
+ int oldMaxLevel = maxLevel;
+ int potentialLevel = embeddedAnn.depth() + level;
+ if ( potentialLevel < 0 ) {
+ potentialLevel = Integer.MAX_VALUE;
+ }
+ maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
+ level++;
+
+ XClass elementClass;
+ if ( void.class == embeddedAnn.targetElement() ) {
+ elementClass = member.getElementClass();
+ }
+ else {
+ elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
+ }
+ if ( maxLevel == Integer.MAX_VALUE //infinite
+ && processedClasses.contains( elementClass ) ) {
+ throw new SearchException(
+ "Circular reference. Duplicate use of "
+ + elementClass.getName()
+ + " in root entity " + beanClass.getName()
+ + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
+ );
+ }
+ if ( level <= maxLevel ) {
+ processedClasses.add( elementClass ); //push
+
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.embeddedGetters.add( member );
+ PropertiesMetadata metadata = new PropertiesMetadata();
+ propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
+ metadata.boost = getBoost( member, null );
+ //property > entity analyzer
+ Analyzer analyzer = getAnalyzer( member, context );
+ metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ initializeMembers( elementClass, metadata, false, localPrefix, processedClasses,
context );
+ /**
+ * We will only index the "expected" type but that's OK, HQL cannot do
downcasting either
+ */
+ if ( member.isArray() ) {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
+ }
+ else if ( member.isCollection() ) {
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum subclasses etc etc??
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION
);
+ }
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
+ }
+
+ processedClasses.remove( elementClass ); //pop
+ }
+ else if ( log.isTraceEnabled() ) {
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ log.trace( "depth reached, ignoring {}", localPrefix );
+ }
+
+ level--;
+ maxLevel = oldMaxLevel; //set back the old max level
+ }
+ }
+
+ private void checkDocumentId(XProperty member, PropertiesMetadata propertiesMetadata,
boolean isRoot, String prefix, InitContext context) {
+ Annotation idAnnotation = getIdAnnotation( member, context );
+ if ( idAnnotation != null ) {
+ String attributeName = getIdAttributeName( member, idAnnotation );
+ if ( isRoot ) {
+ if ( idKeywordName != null && explicitDocumentId ) {
+ throw new AssertionFailure(
+ "Two document id assigned: "
+ + idKeywordName + " and " + attributeName
+ );
+ }
+ idKeywordName = prefix + attributeName;
+ FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager
);
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = ( TwoWayFieldBridge ) fieldBridge;
+ }
+ else {
+ throw new SearchException(
+ "Bridge for document id does not implement TwoWayFieldBridge: " +
member.getName()
+ );
+ }
+ idBoost = getBoost( member, null );
+ ReflectionHelper.setAccessible( member );
+ idGetter = member;
+ }
+ else {
+ //components should index their document id
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + attributeName;
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
+ propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member,
reflectionManager ) );
+ propertiesMetadata.fieldBoosts.add( getBoost( member, null ) );
+ // property > entity analyzer (no field analyzer)
+ Analyzer analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) {
+ analyzer = propertiesMetadata.analyzer;
+ }
+ if ( analyzer == null ) {
+ throw new AssertionFailure( "Analizer should not be undefined" );
+ }
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+ }
+
+ /**
+ * Checks whether the specified property contains an annotation used as document id.
+ * This can either be an explicit <code>@DocumentId</code> or if no
<code>@DocumentId</code> is specified a
+ * JPA <code>@Id</code> annotation. The check for the JPA annotation is done indirectly
+ * to avoid a hard dependency on Hibernate Annotations.
+ *
+ * @param member the property to check for the id annotation.
+ * @param context Handle to default configuration settings.
+ *
+ * @return the annotation used as document id or <code>null</code> if no id annotation is specified on the property.
+ */
+ private Annotation getIdAnnotation(XProperty member, InitContext context) {
+ // check for explicit DocumentId
+ Annotation documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ explicitDocumentId = true;
+ return documentIdAnn;
+ }
+
+ // check for JPA @Id
+ if ( !explicitDocumentId && context.isJpaPresent() ) {
+ Class idClass;
+ try {
+ idClass = org.hibernate.util.ReflectHelper.classForName(
"javax.persistence.Id", InitContext.class );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException( "Unable to load @Id.class even though it should be
present ?!" );
+ }
+ documentIdAnn = member.getAnnotation( idClass );
+ if ( documentIdAnn != null ) {
+ log.debug( "Found JPA id and using it as document id" );
+ }
+ }
+ return documentIdAnn;
+ }
+
+ /**
+ * Determines the property name for the document id. It is either the name of the
property itself or the
+ * value of the name attribute of the <code>idAnnotation</code>.
+ *
+ * @param member the property used as id property.
+ * @param idAnnotation the id annotation
+ *
+ * @return property name to be used as document id.
+ */
+ private String getIdAttributeName(XProperty member, Annotation idAnnotation) {
+ String name = null;
+ try {
+ Method m = idAnnotation.getClass().getMethod( "name" );
+ name = ( String ) m.invoke( idAnnotation );
+ }
+ catch ( Exception e ) {
+ // ignore
+ }
+
+ return ReflectionHelper.getAttributeName( member, name );
+ }
+
+ private void bindClassAnnotation(String prefix, PropertiesMetadata propertiesMetadata,
ClassBridge ann, InitContext context) {
+ //FIXME name should be prefixed
+ String fieldName = prefix + ann.name();
+ propertiesMetadata.classNames.add( fieldName );
+ propertiesMetadata.classStores.add( getStore( ann.store() ) );
+ propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
+ propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
+ propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
+ propertiesMetadata.classBoosts.add( ann.boost().value() );
+
+ Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
+ if ( analyzer == null ) {
+ analyzer = propertiesMetadata.analyzer;
+ }
+ if ( analyzer == null ) {
+ throw new AssertionFailure( "Analyzer should not be undefined" );
+ }
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+
+ private void bindFieldAnnotation(XProperty member, PropertiesMetadata
propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn,
InitContext context) {
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + ReflectionHelper.getAttributeName( member, fieldAnn.name()
);
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
+ propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
+ propertiesMetadata.fieldBoosts.add( getBoost( member, fieldAnn ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member,
reflectionManager ) );
+
+ // Field > property > entity analyzer
+ Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
+ if ( analyzer == null ) {
+ analyzer = getAnalyzer( member, context );
+ }
+ if ( analyzer != null ) {
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+
+ private Float getBoost(XProperty member, org.hibernate.search.annotations.Field
fieldAnn) {
+ float computedBoost = 1.0f;
+ Boost boostAnn = member.getAnnotation( Boost.class );
+ if ( boostAnn != null ) {
+ computedBoost = boostAnn.value();
+ }
+ if ( fieldAnn != null ) {
+ computedBoost *= fieldAnn.boost().value();
+ }
+ return computedBoost;
+ }
+
+ private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty
member) {
+ String localPrefix = prefix;
+ if ( ".".equals( embeddedAnn.prefix() ) ) {
+ //default to property name
+ localPrefix += member.getName() + '.';
+ }
+ else {
+ localPrefix += embeddedAnn.prefix();
+ }
+ return localPrefix;
+ }
+
+ private Field.Store getStore(Store store) {
+ switch ( store ) {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure( "Unexpected Store: " + store );
+ }
+ }
+
+ private Field.TermVector getTermVector(TermVector vector) {
+ switch ( vector ) {
+ case NO:
+ return Field.TermVector.NO;
+ case YES:
+ return Field.TermVector.YES;
+ case WITH_OFFSETS:
+ return Field.TermVector.WITH_OFFSETS;
+ case WITH_POSITIONS:
+ return Field.TermVector.WITH_POSITIONS;
+ case WITH_POSITION_OFFSETS:
+ return Field.TermVector.WITH_POSITIONS_OFFSETS;
+ default:
+ throw new AssertionFailure( "Unexpected TermVector: " + vector );
+ }
+ }
+
+ private Field.Index getIndex(Index index) {
+ switch ( index ) {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NOT_ANALYZED_NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.ANALYZED;
+ case UN_TOKENIZED:
+ return Field.Index.NOT_ANALYZED;
+ default:
+ throw new AssertionFailure( "Unexpected Index: " + index );
+ }
+ }
+
+ private Float getBoost(XClass element) {
+ if ( element == null ) {
+ return null;
+ }
+ Boost boost = element.getAnnotation( Boost.class );
+ return boost != null ?
+ boost.value() :
+ null;
+ }
+
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class<T> entityClass, T entity, Serializable id,
WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor
searchFactoryImplementor) {
+ /**
+ * When references are changed, either null or another one, we expect dirty checking to
be triggered (both sides
+ * have to be updated)
+ * When the internal object is changed, we apply the {Add|Update}Work on containedIns
+ */
+ if ( workType.searchForContainers() ) {
+ processContainedIn( entity, queue, metadata, searchFactoryImplementor );
+ }
+ }
+
+ protected void processContainedIn(Object instance, List<LuceneWork> queue,
PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
+ for ( int i = 0; i < metadata.containedInGetters.size(); i++ ) {
+ XMember member = metadata.containedInGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( instance, member );
+ if ( value == null ) {
+ continue;
+ }
+
+ if ( member.isArray() ) {
+ for ( Object arrayValue : ( Object[] ) value ) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class<?> valueClass = Hibernate.getClass( arrayValue );
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( valueClass );
+ if ( builderIndexedEntity == null ) {
+ continue;
+ }
+ processContainedInValue( arrayValue, queue, valueClass,
+ builderIndexedEntity, searchFactoryImplementor );
+ }
+ }
+ else if ( member.isCollection() ) {
+ Collection collection;
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum
+ collection = ( ( Map ) value ).values();
+ }
+ else {
+ collection = ( Collection ) value;
+ }
+ for ( Object collectionValue : collection ) {
+ //highly inefficient but safe wrt the actual targeted class
+ Class<?> valueClass = Hibernate.getClass( collectionValue );
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( valueClass );
+ if ( builderIndexedEntity == null ) {
+ continue;
+ }
+ processContainedInValue( collectionValue, queue, valueClass,
+ builderIndexedEntity, searchFactoryImplementor );
+ }
+ }
+ else {
+ Class<?> valueClass = Hibernate.getClass( value );
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( valueClass );
+ if ( builderIndexedEntity == null ) {
+ continue;
+ }
+ processContainedInValue( value, queue, valueClass, builderIndexedEntity,
searchFactoryImplementor );
+ }
+ }
+ //an embedded cannot have a useful @ContainedIn (no shared reference)
+ //do not walk through them
+ }
+
+ private void processContainedInValue(Object value, List<LuceneWork> queue,
Class<?> valueClass,
+ DocumentBuilderIndexedEntity builderIndexedEntity, SearchFactoryImplementor
searchFactoryImplementor) {
+ Serializable id = ( Serializable ) ReflectionHelper.getMemberValue( value,
builderIndexedEntity.idGetter );
+ builderIndexedEntity.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue,
searchFactoryImplementor );
+ }
+
+ public Document getDocument(T instance, Serializable id) {
+ Document doc = new Document();
+ final Class<?> entityType = Hibernate.getClass( instance );
+ //XClass instanceClass = reflectionManager.toXClass( entityType );
+ if ( metadata.boost != null ) {
+ doc.setBoost( metadata.boost );
+ }
+ {
+ Field classField =
+ new Field(
+ CLASS_FIELDNAME,
+ entityType.getName(),
+ Field.Store.YES,
+ Field.Index.NOT_ANALYZED,
+ Field.TermVector.NO
+ );
+ doc.add( classField );
+ LuceneOptions luceneOptions = new LuceneOptionsImpl(
+ Field.Store.YES,
+ Field.Index.NOT_ANALYZED, Field.TermVector.NO, idBoost
+ );
+ idBridge.set( idKeywordName, id, doc, luceneOptions );
+ }
+ buildDocumentFields( instance, doc, metadata );
+ return doc;
+ }
+
+ private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata
propertiesMetadata) {
+ if ( instance == null ) {
+ return;
+ }
+ //needed for field access: this cannot work on the proxied version
+ Object unproxiedInstance = unproxy( instance );
+ for ( int i = 0; i < propertiesMetadata.classBridges.size(); i++ ) {
+ FieldBridge fb = propertiesMetadata.classBridges.get( i );
+ fb.set(
+ propertiesMetadata.classNames.get( i ), unproxiedInstance,
+ doc, propertiesMetadata.getClassLuceneOptions( i )
+ );
+ }
+ for ( int i = 0; i < propertiesMetadata.fieldNames.size(); i++ ) {
+ XMember member = propertiesMetadata.fieldGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( unproxiedInstance, member );
+ propertiesMetadata.fieldBridges.get( i ).set(
+ propertiesMetadata.fieldNames.get( i ), value, doc,
+ propertiesMetadata.getFieldLuceneOptions( i )
+ );
+ }
+ for ( int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++ ) {
+ XMember member = propertiesMetadata.embeddedGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( unproxiedInstance, member );
+ //TODO handle boost at embedded level: already stored in propertiesMetadata.boost
+
+ if ( value == null ) {
+ continue;
+ }
+ PropertiesMetadata embeddedMetadata =
propertiesMetadata.embeddedPropertiesMetadata.get( i );
+ switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
+ case ARRAY:
+ for ( Object arrayValue : ( Object[] ) value ) {
+ buildDocumentFields( arrayValue, doc, embeddedMetadata );
+ }
+ break;
+ case COLLECTION:
+ for ( Object collectionValue : ( Collection ) value ) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case MAP:
+ for ( Object collectionValue : ( ( Map ) value ).values() ) {
+ buildDocumentFields( collectionValue, doc, embeddedMetadata );
+ }
+ break;
+ case OBJECT:
+ buildDocumentFields( value, doc, embeddedMetadata );
+ break;
+ default:
+ throw new AssertionFailure(
+ "Unknown embedded container: "
+ + propertiesMetadata.embeddedContainers.get( i )
+ );
+ }
+ }
+ }
+
+ private Object unproxy(Object value) {
+ //FIXME this service should be part of Core?
+ if ( value instanceof HibernateProxy ) {
+ // .getImplementation() initializes the data by side effect
+ value = ( ( HibernateProxy ) value ).getHibernateLazyInitializer()
+ .getImplementation();
+ }
+ return value;
+ }
+
+ public Term getTerm(Serializable id) {
+ if ( idProvided ) {
+ return new Term( idKeywordName, ( String ) id );
+ }
+
+ return new Term( idKeywordName, idBridge.objectToString( id ) );
+ }
+
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
+
+ public TwoWayFieldBridge getIdBridge() {
+ return idBridge;
+ }
+
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
+
+ public static Class getDocumentClass(Document document) {
+ String className = document.get( DocumentBuilderContainedEntity.CLASS_FIELDNAME );
+ try {
+ return ReflectHelper.classForName( className );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException( "Unable to load indexed class: " + className, e
);
+ }
+ }
+
+ public static Serializable getDocumentId(SearchFactoryImplementor
searchFactoryImplementor, Class<?> clazz, Document document) {
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz );
+ if ( builderIndexedEntity == null ) {
+ throw new SearchException( "No Lucene configuration set up for: " +
clazz.getName() );
+ }
+ return ( Serializable ) builderIndexedEntity.getIdBridge().get(
builderIndexedEntity.getIdKeywordName(), document );
+ }
+
+ public static Object[] getDocumentFields(SearchFactoryImplementor
searchFactoryImplementor, Class<?> clazz, Document document, String[] fields) {
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz );
+ if ( builderIndexedEntity == null ) {
+ throw new SearchException( "No Lucene configuration set up for: " +
clazz.getName() );
+ }
+ final int fieldNbr = fields.length;
+ Object[] result = new Object[fieldNbr];
+
+ if ( builderIndexedEntity.idKeywordName != null ) {
+ populateResult( builderIndexedEntity.idKeywordName, builderIndexedEntity.idBridge,
Field.Store.YES, fields, result, document );
+ }
+
+ final PropertiesMetadata metadata = builderIndexedEntity.metadata;
+ processFieldsForProjection( metadata, fields, result, document );
+ return result;
+ }
+
+ private static void processFieldsForProjection(PropertiesMetadata metadata, String[]
fields, Object[] result, Document document) {
+ final int nbrOfEntityFields = metadata.fieldNames.size();
+ for ( int index = 0; index < nbrOfEntityFields; index++ ) {
+ populateResult(
+ metadata.fieldNames.get( index ),
+ metadata.fieldBridges.get( index ),
+ metadata.fieldStore.get( index ),
+ fields,
+ result,
+ document
+ );
+ }
+ final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
+ for ( int index = 0; index < nbrOfEmbeddedObjects; index++ ) {
+ //there is nothing we can do for collections
+ if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT )
{
+ processFieldsForProjection(
+ metadata.embeddedPropertiesMetadata.get( index ), fields, result, document
+ );
+ }
+ }
+ }
+
+ private static void populateResult(String fieldName, FieldBridge fieldBridge,
Field.Store store,
+ String[] fields, Object[] result, Document document) {
+ int matchingPosition = getFieldPosition( fields, fieldName );
+ if ( matchingPosition != -1 ) {
+ //TODO make use of an isTwoWay() method
+ if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom(
fieldBridge.getClass() ) ) {
+ result[matchingPosition] = ( ( TwoWayFieldBridge ) fieldBridge ).get( fieldName,
document );
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Field {} projected as {}", fieldName, result[matchingPosition]
);
+ }
+ }
+ else {
+ if ( store == Field.Store.NO ) {
+ throw new SearchException( "Projecting an unstored field: " + fieldName
);
+ }
+ else {
+ throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " +
fieldBridge.getClass() );
+ }
+ }
+ }
+ }
+
+ private static int getFieldPosition(String[] fields, String fieldName) {
+ int fieldNbr = fields.length;
+ for ( int index = 0; index < fieldNbr; index++ ) {
+ if ( fieldName.equals( fields[index] ) ) {
+ return index;
+ }
+ }
+ return -1;
+ }
+
+ public void postInitialize(Set<Class<?>> indexedClasses) {
+ if ( entityState == EntityState.NON_INDEXABLE ) {
+ throw new AssertionFailure( "A non indexed entity is post processed" );
+ }
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass( beanClass );
+ Set<Class<?>> tempMappedSubclasses = new HashSet<Class<?>>();
+ //together with the caller this creates an O(n^2), but I think it's still faster than creating the class hierarchy upwards for each class
+ for ( Class currentClass : indexedClasses ) {
+ if ( plainClass.isAssignableFrom( currentClass ) ) {
+ tempMappedSubclasses.add( currentClass );
+ }
+ }
+ this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
+ Class superClass = plainClass.getSuperclass();
+ this.isRoot = true;
+ while ( superClass != null ) {
+ if ( indexedClasses.contains( superClass ) ) {
+ this.isRoot = false;
+ break;
+ }
+ superClass = superClass.getSuperclass();
+ }
+ this.reflectionManager = null;
+ }
+
+ public EntityState getEntityState() {
+ return entityState;
+ }
+
+ public Set<Class<?>> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
+
+ /**
+ * Make sure to return false if there is a risk of a composite id:
+ * if a composite id is used, (a, b) in ((1,2), (3,4)) fails on most databases
+ */
+ public boolean isSafeFromTupleId() {
+ return safeFromTupleId;
+ }
+
+ /**
+ * Wrapper class containing all the meta data extracted out of the entities.
+ */
+ protected static class PropertiesMetadata {
+ public Float boost;
+ public Analyzer analyzer;
+ public final List<String> fieldNames = new ArrayList<String>();
+ public final List<XMember> fieldGetters = new ArrayList<XMember>();
+ public final List<FieldBridge> fieldBridges = new
ArrayList<FieldBridge>();
+ public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+ public final List<Float> fieldBoosts = new ArrayList<Float>();
+ public final List<Field.TermVector> fieldTermVectors = new
ArrayList<Field.TermVector>();
+ public final List<XMember> embeddedGetters = new ArrayList<XMember>();
+ public final List<PropertiesMetadata> embeddedPropertiesMetadata = new
ArrayList<PropertiesMetadata>();
+ public final List<Container> embeddedContainers = new
ArrayList<Container>();
+ public final List<XMember> containedInGetters = new ArrayList<XMember>();
+ public final List<String> classNames = new ArrayList<String>();
+ public final List<Field.Store> classStores = new ArrayList<Field.Store>();
+ public final List<Field.Index> classIndexes = new
ArrayList<Field.Index>();
+ public final List<FieldBridge> classBridges = new
ArrayList<FieldBridge>();
+ public final List<Field.TermVector> classTermVectors = new
ArrayList<Field.TermVector>();
+ public final List<Float> classBoosts = new ArrayList<Float>();
+
+ public enum Container {
+ OBJECT,
+ COLLECTION,
+ MAP,
+ ARRAY
+ }
+
+ protected LuceneOptions getClassLuceneOptions(int i) {
+ return new LuceneOptionsImpl(
+ classStores.get( i ),
+ classIndexes.get( i ), classTermVectors.get( i ), classBoosts.get( i )
+ );
+ }
+
+ protected LuceneOptions getFieldLuceneOptions(int i) {
+ LuceneOptions options;
+ options = new LuceneOptionsImpl(
+ fieldStore.get( i ),
+ fieldIndex.get( i ), fieldTermVectors.get( i ), fieldBoosts.get( i )
+ );
+ return options;
+ }
+ }
+}
\ No newline at end of file
Property changes on:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
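For orientation, here is a minimal sketch of the kind of mapping the copied builder now covers. The Book and Author classes below are hypothetical, not part of this change set: an entity that is not @Indexed itself but carries @ContainedIn ends up in EntityState.CONTAINED_IN_ONLY and is handled by a DocumentBuilderContainedEntity, whose only job is to trigger re-indexing of the owning indexed entity.

// Hypothetical mapping, for illustration only.
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;

@Entity
@Indexed
class Book {
	@Id @DocumentId
	Long id;

	@Field
	String title;

	@IndexedEmbedded
	Author author;      // indexed into Book's document as author.name
}

@Entity
class Author {          // no @Indexed: gets a DocumentBuilderContainedEntity
	@Id
	Long id;

	@Field
	String name;

	@ContainedIn
	Book book;          // changing an Author re-indexes its owning Book
}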
Copied:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java (from
rev 15603, search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java)
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
(rev 0)
+++
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -0,0 +1,162 @@
+//$Id$
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.document.Document;
+
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.ProvidedId;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.bridge.BridgeFactory;
+import org.hibernate.search.impl.InitContext;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * Set up and provide a manager for indexed classes.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Richard Hallier
+ * @author Hardy Ferentschik
+ */
+public class DocumentBuilderIndexedEntity<T> extends
DocumentBuilderContainedEntity<T> {
+
+ private final DirectoryProvider[] directoryProviders;
+ private final IndexShardingStrategy shardingStrategy;
+
+ /**
+ * Constructor used on an @Indexed entity.
+ */
+ public DocumentBuilderIndexedEntity(XClass clazz, InitContext context,
DirectoryProvider[] directoryProviders,
+ IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
+
+ super( clazz, context, reflectionManager );
+
+ this.entityState = EntityState.INDEXED;
+ this.directoryProviders = directoryProviders;
+ this.shardingStrategy = shardingStrategy;
+
+ if ( idKeywordName == null ) {
+ // if no DocumentId then check if we have a ProvidedId instead
+ ProvidedId provided = findProvidedId( clazz, reflectionManager );
+ if ( provided == null ) {
+ throw new SearchException( "No document id in: " + clazz.getName() );
+ }
+
+ idBridge = BridgeFactory.extractTwoWayType( provided.bridge() );
+ idKeywordName = provided.name();
+ }
+ }
+
+ private ProvidedId findProvidedId(XClass clazz, ReflectionManager reflectionManager) {
+ ProvidedId id = null;
+ XClass currentClass = clazz;
+ while ( id == null && ( !reflectionManager.equals( currentClass, Object.class )
) ) {
+ id = currentClass.getAnnotation( ProvidedId.class );
+ currentClass = currentClass.getSuperclass();
+ }
+ return id;
+ }
+
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class<T> entityClass, T entity, Serializable id,
WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor
searchFactoryImplementor) {
+ //TODO with the caller loop we are in an n^2: optimize it using a HashMap for work recognition
+
+ List<LuceneWork> toDelete = new ArrayList<LuceneWork>();
+ boolean duplicateDelete = false;
+ for ( LuceneWork luceneWork : queue ) {
+ //avoid unnecessary duplicated work
+ if ( luceneWork.getEntityClass() == entityClass ) {
+ Serializable currentId = luceneWork.getId();
+ //currentId != null => either ADD or Delete work
+ if ( currentId != null && currentId.equals( id ) ) { //find a way to use
Type.equals(x,y)
+ if ( workType == WorkType.DELETE ) { //TODO add PURGE?
+ //DELETE should have precedence over any update before (HSEARCH-257)
+ //if an Add work is here, remove it
+ //if another delete is here, remember it but still search for an Add
+ if ( luceneWork instanceof AddLuceneWork ) {
+ toDelete.add( luceneWork );
+ }
+ else if ( luceneWork instanceof DeleteLuceneWork ) {
+ duplicateDelete = true;
+ }
+ }
+ else {
+ //we can safely say we are out, the other work is an ADD
+ return;
+ }
+ }
+ //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
+ }
+ }
+ for ( LuceneWork luceneWork : toDelete ) {
+ queue.remove( luceneWork );
+ }
+ if ( duplicateDelete ) {
+ return;
+ }
+
+ String idInString = idBridge.objectToString( id );
+ if ( workType == WorkType.ADD ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ }
+ else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ }
+ else if ( workType == WorkType.PURGE_ALL ) {
+ queue.add( new PurgeAllLuceneWork( entityClass ) );
+ }
+ else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
+ Document doc = getDocument( entity, id );
+ /**
+ * even with Lucene 2.1, use of indexWriter to update is not an option
+ * We can only delete by term, and the index doesn't have a term that
+ * uniquely identifies the entry.
+ * But essentially the optimization we are doing is the same one Lucene does; the only
+ * extra cost is the double file opening.
+ */
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc ) );
+ }
+ else if ( workType == WorkType.INDEX ) {
+ Document doc = getDocument( entity, id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( new AddLuceneWork( id, idInString, entityClass, doc, true ) );
+ }
+ else {
+ throw new AssertionFailure( "Unknown WorkType: " + workType );
+ }
+
+ super.addWorkToQueue( entityClass, entity, id, workType, queue, searchFactoryImplementor );
+ }
+
+ public DirectoryProvider[] getDirectoryProviders() {
+ if ( entityState != EntityState.INDEXED ) {
+ throw new AssertionFailure( "Contained in only entity: getDirectoryProvider
should not have been called." );
+ }
+ return directoryProviders;
+ }
+
+ public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
+ if ( entityState != EntityState.INDEXED ) {
+ throw new AssertionFailure(
+ "Contained in only entity: getDirectoryProviderSelectionStrategy should not
have been called."
+ );
+ }
+ return shardingStrategy;
+ }
+}
Property changes on:
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
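A rough usage sketch of the queueing contract implemented above. The Book entity and the already-built SearchFactoryImplementor are assumptions carried over from the hypothetical mapping earlier, not part of this commit: since a Lucene document cannot be updated in place, WorkType.UPDATE is expanded into a delete-by-term followed by an add, and a later DELETE for the same id collapses any pending add (HSEARCH-257).

// Sketch only; Book and a configured SearchFactoryImplementor are assumed.
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.SearchFactoryImplementor;

class QueueingSketch {
	void queueUpdateThenDelete(SearchFactoryImplementor factory, Book book, Serializable id) {
		List<LuceneWork> queue = new ArrayList<LuceneWork>();
		DocumentBuilderIndexedEntity<Book> builder =
				factory.getDocumentBuilderIndexedEntity( Book.class );

		// UPDATE is expanded into DeleteLuceneWork + AddLuceneWork for the same id
		builder.addWorkToQueue( Book.class, book, id, WorkType.UPDATE, queue, factory );

		// a later DELETE for the same id drops the pending add and keeps a single delete
		builder.addWorkToQueue( Book.class, book, id, WorkType.DELETE, queue, factory );
	}
}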
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-21
13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -52,7 +52,7 @@
// set up the field selector. CLASS_FIELDNAME and id fields are needed on top of any
projected fields
Map<String, FieldSelectorResult> fields = new HashMap<String,
FieldSelectorResult>( 1 + idFieldNames.size() + projectionSize );
- fields.put( DocumentBuilder.CLASS_FIELDNAME, FieldSelectorResult.LOAD );
+ fields.put( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, FieldSelectorResult.LOAD );
for ( String idFieldName : idFieldNames ) {
fields.put( idFieldName, FieldSelectorResult.LOAD );
}
@@ -65,11 +65,11 @@
}
private EntityInfo extract(Document document) {
- Class clazz = DocumentBuilder.getDocumentClass( document );
- Serializable id = DocumentBuilder.getDocumentId( searchFactoryImplementor, clazz,
document );
+ Class clazz = DocumentBuilderIndexedEntity.getDocumentClass( document );
+ Serializable id = DocumentBuilderIndexedEntity.getDocumentId( searchFactoryImplementor,
clazz, document );
Object[] projected = null;
if ( projection != null && projection.length > 0 ) {
- projected = DocumentBuilder.getDocumentFields( searchFactoryImplementor, clazz,
document, projection );
+ projected = DocumentBuilderIndexedEntity.getDocumentFields( searchFactoryImplementor,
clazz, document, projection );
}
return new EntityInfo( clazz, id, projected );
}
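Related to the projection plumbing that moved with these helpers: populateResult() in the copied builder only projects fields that are stored and backed by a TwoWayFieldBridge. A hedged query-level sketch, in which the "title" field, the Book entity, luceneQuery and fullTextSession are assumptions rather than part of this patch:

// Sketch: assumes Book.title is mapped with @Field(store = Store.YES)
// and that luceneQuery and fullTextSession already exist.
FullTextQuery query = fullTextSession.createFullTextQuery( luceneQuery, Book.class );
query.setProjection( "title", FullTextQuery.ID );
for ( Object row : query.list() ) {
	Object[] projection = ( Object[] ) row;   // { title, id }
	String title = ( String ) projection[0];
}
// projecting a Store.NO field would instead fail with "Projecting an unstored field: ..."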
Modified: search/trunk/src/java/org/hibernate/search/engine/MultiClassesQueryLoader.java
===================================================================
---
search/trunk/src/java/org/hibernate/search/engine/MultiClassesQueryLoader.java 2008-11-21
13:58:05 UTC (rev 15604)
+++
search/trunk/src/java/org/hibernate/search/engine/MultiClassesQueryLoader.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -35,7 +35,7 @@
// root entity could lead to quite inefficient queries in Hibernate when using
table per class
if ( entityTypes.size() == 0 ) {
//support all classes
- for( Map.Entry<Class<?>, DocumentBuilder<?>> entry :
searchFactoryImplementor.getDocumentBuilders().entrySet() ) {
+ for( Map.Entry<Class<?>, DocumentBuilderIndexedEntity<?>> entry :
searchFactoryImplementor.getDocumentBuilders().entrySet() ) {
//get only root entities to limit queries
if ( entry.getValue().isRoot() ) {
safeEntityTypes.add( entry.getKey() );
@@ -113,7 +113,7 @@
RootEntityMetadata(Class<?> rootEntity, SearchFactoryImplementor
searchFactoryImplementor, Session session) {
this.rootEntity = rootEntity;
- DocumentBuilder<?> provider = searchFactoryImplementor.getDocumentBuilder(
rootEntity );
+ DocumentBuilderIndexedEntity<?> provider =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( rootEntity );
if ( provider == null) throw new AssertionFailure("Provider not found for class:
" + rootEntity);
this.mappedSubclasses = provider.getMappedSubclasses();
this.criteria = session.createCriteria( rootEntity );
Modified: search/trunk/src/java/org/hibernate/search/engine/ObjectLoaderHelper.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/ObjectLoaderHelper.java 2008-11-21
13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/engine/ObjectLoaderHelper.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -46,7 +46,7 @@
final int maxResults = entityInfos.length;
if ( maxResults == 0 ) return;
- DocumentBuilder<?> builder = searchFactoryImplementor.getDocumentBuilder(
entityType );
+ DocumentBuilderIndexedEntity<?> builder =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityType );
String idName = builder.getIdentifierName();
int loop = maxResults / MAX_IN_CLAUSE;
boolean exact = maxResults % MAX_IN_CLAUSE == 0;
Modified: search/trunk/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
===================================================================
---
search/trunk/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2008-11-21
13:58:05 UTC (rev 15604)
+++
search/trunk/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -24,11 +24,11 @@
void setBackendQueueProcessorFactory(BackendQueueProcessorFactory
backendQueueProcessorFactory);
- Map<Class<?>, DocumentBuilder<?>> getDocumentBuilders();
+ Map<Class<?>, DocumentBuilderIndexedEntity<?>> getDocumentBuilders();
- <T> DocumentBuilder<T> getDocumentBuilder(Class<T> entityType);
+ <T> DocumentBuilderIndexedEntity<T>
getDocumentBuilderIndexedEntity(Class<T> entityType);
- <T> DocumentBuilder<T> getContainedInOnlyBuilder(Class<T>
entityType);
+ <T> DocumentBuilderContainedEntity<T>
getDocumentBuilderContainedEntity(Class<T> entityType);
Worker getWorker();
Modified:
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
---
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-11-21
13:58:05 UTC (rev 15604)
+++
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -74,7 +74,7 @@
if ( used ) {
final Class<?> entityType = event.getEntity().getClass();
if ( searchFactoryImplementor.getDocumentBuilders().containsKey( entityType )
- || searchFactoryImplementor.getContainedInOnlyBuilder( entityType ) != null ) {
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entityType ) != null
) {
processWork( event.getEntity(), event.getId(), WorkType.DELETE, event );
}
}
@@ -83,8 +83,8 @@
public void onPostInsert(PostInsertEvent event) {
if ( used ) {
final Object entity = event.getEntity();
- if ( searchFactoryImplementor.getDocumentBuilder( entity.getClass() ) != null
- || searchFactoryImplementor.getContainedInOnlyBuilder( entity.getClass() ) != null )
{
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) !=
null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() )
!= null ) {
Serializable id = event.getId();
processWork( entity, id, WorkType.ADD, event );
}
@@ -94,8 +94,8 @@
public void onPostUpdate(PostUpdateEvent event) {
if ( used ) {
final Object entity = event.getEntity();
- if ( searchFactoryImplementor.getDocumentBuilder( entity.getClass() ) != null
- || searchFactoryImplementor.getContainedInOnlyBuilder( entity.getClass() ) != null )
{
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) !=
null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() )
!= null ) {
Serializable id = event.getId();
processWork( entity, id, WorkType.UPDATE, event );
}
@@ -133,8 +133,8 @@
return;
}
if ( used ) {
- if ( searchFactoryImplementor.getDocumentBuilder( entity.getClass() ) != null
- || searchFactoryImplementor.getContainedInOnlyBuilder( entity.getClass() ) != null )
{
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) !=
null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() )
!= null ) {
Serializable id = getId( entity, event );
if ( id == null ) {
log.warn(
Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-21
13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -48,7 +48,7 @@
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.backend.impl.EventSourceTransactionContext;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.query.FullTextQueryImpl;
import org.hibernate.search.util.ContextHelper;
@@ -62,6 +62,7 @@
* @author John Griffin
* @author Hardy Ferentschik
*/
+@SuppressWarnings( "deprecation" )
public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
private final Session session;
@@ -114,7 +115,7 @@
}
for ( Class clazz : targetedClasses ) {
- DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilder( clazz );
+ DocumentBuilderIndexedEntity builder =
searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz );
Work<T> work;
if ( id == null ) {
// purge the main entity
@@ -153,7 +154,7 @@
//TODO cache that at the FTSession level
SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
//not strictly necessary but a small optimization
- if ( searchFactoryImplementor.getDocumentBuilder( clazz ) == null ) {
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz ) == null ) {
String msg = "Entity to index is not an @Indexed entity: " +
entity.getClass().getName();
throw new IllegalArgumentException( msg );
}
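The manual indexing entry point keeps the same contract under the renamed lookup; a small sketch reusing the hypothetical Book/Author mapping from above and an assumed open FullTextSession:

// Sketch: fullTextSession, book and author are assumed to exist.
fullTextSession.index( book );    // Book resolves to a DocumentBuilderIndexedEntity
fullTextSession.index( author );  // IllegalArgumentException: Author is not an @Indexed entity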
Modified: search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-11-21
13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2008-11-23
10:48:59 UTC (rev 15605)
@@ -39,10 +39,11 @@
import org.hibernate.search.backend.WorkerFactory;
import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
import org.hibernate.search.cfg.SearchConfiguration;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.FilterDef;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.engine.EntityState;
+import org.hibernate.search.engine.DocumentBuilderContainedEntity;
import org.hibernate.search.filter.CachingWrapperFilter;
import org.hibernate.search.filter.FilterCachingStrategy;
import org.hibernate.search.filter.MRUFilterCachingStrategy;
@@ -64,8 +65,8 @@
private static final Logger log = LoggerFactory.make();
- private final Map<Class<?>, DocumentBuilder<?>> documentBuilders = new HashMap<Class<?>, DocumentBuilder<?>>();
- private final Map<Class<?>, DocumentBuilder<?>> containedInOnlyBuilders = new HashMap<Class<?>, DocumentBuilder<?>>();
+ private final Map<Class<?>, DocumentBuilderIndexedEntity<?>> documentBuildersIndexedEntities = new HashMap<Class<?>, DocumentBuilderIndexedEntity<?>>();
+ private final Map<Class<?>, DocumentBuilderContainedEntity<?>> documentBuildersContainedEntities = new HashMap<Class<?>, DocumentBuilderContainedEntity<?>>();
//keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
private final Map<DirectoryProvider<?>, DirectoryProviderData> dirProviderData = new HashMap<DirectoryProvider<?>, DirectoryProviderData>();
private final Worker worker;
@@ -117,12 +118,12 @@
this.indexingStrategy = defineIndexingStrategy( cfg ); //need to be done before the document builds
initDocumentBuilders( cfg, reflectionManager );
- Set<Class<?>> indexedClasses = documentBuilders.keySet();
- for ( DocumentBuilder builder : documentBuilders.values() ) {
+ Set<Class<?>> indexedClasses = documentBuildersIndexedEntities.keySet();
+ for ( DocumentBuilderIndexedEntity builder : documentBuildersIndexedEntities.values() ) {
builder.postInitialize( indexedClasses );
}
//not really necessary today
- for ( DocumentBuilder builder : containedInOnlyBuilders.values() ) {
+ for ( DocumentBuilderContainedEntity builder : documentBuildersContainedEntities.values() ) {
builder.postInitialize( indexedClasses );
}
this.worker = WorkerFactory.createWorker( cfg, this );
@@ -260,24 +261,24 @@
}
- public Map<Class<?>, DocumentBuilder<?>> getDocumentBuilders() {
+ public Map<Class<?>, DocumentBuilderIndexedEntity<?>> getDocumentBuilders() {
if ( barrier != 0 ) {
} //read barrier
- return documentBuilders;
+ return documentBuildersIndexedEntities;
}
@SuppressWarnings("unchecked")
- public <T> DocumentBuilder<T> getDocumentBuilder(Class<T> entityType) {
+ public <T> DocumentBuilderIndexedEntity<T> getDocumentBuilderIndexedEntity(Class<T> entityType) {
if ( barrier != 0 ) {
} //read barrier
- return ( DocumentBuilder<T> ) documentBuilders.get( entityType );
+ return ( DocumentBuilderIndexedEntity<T> ) documentBuildersIndexedEntities.get( entityType );
}
@SuppressWarnings("unchecked")
- public <T> DocumentBuilder<T> getContainedInOnlyBuilder(Class<T> entityType) {
+ public <T> DocumentBuilderContainedEntity<T> getDocumentBuilderContainedEntity(Class<T> entityType) {
if ( barrier != 0 ) {
} //read barrier
- return ( DocumentBuilder<T> ) containedInOnlyBuilders.get( entityType );
+ return ( DocumentBuilderContainedEntity<T> ) documentBuildersContainedEntities.get( entityType );
}
public Set<DirectoryProvider<?>> getDirectoryProviders() {
@@ -328,7 +329,7 @@
public DirectoryProvider[] getDirectoryProviders(Class<?> entity) {
if ( barrier != 0 ) {
} //read barrier
- DocumentBuilder<?> documentBuilder = getDocumentBuilder( entity );
+ DocumentBuilderIndexedEntity<?> documentBuilder = getDocumentBuilderIndexedEntity( entity );
return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
}
@@ -367,7 +368,7 @@
throw new IllegalArgumentException( "A class has to be specified for retrieving a scoped analyzer" );
}
- DocumentBuilder<?> builder = documentBuilders.get( clazz );
+ DocumentBuilderIndexedEntity<?> builder = documentBuildersIndexedEntities.get( clazz );
if ( builder == null ) {
throw new IllegalArgumentException(
"Entity for which to retrieve the scoped analyzer is not an @Indexed entity:
" + clazz.getName()
@@ -403,25 +404,25 @@
DirectoryProviderFactory.DirectoryProviders providers = factory.createDirectoryProviders(
mappedXClass, cfg, this, reflectionManager
);
- //FIXME DocumentBuilder needs to be built by a helper method receiving Class<T> to infer T properly
+ //FIXME DocumentBuilderIndexedEntity needs to be built by a helper method receiving Class<T> to infer T properly
//XClass unfortunately is not (yet) genericized: TODO?
- final DocumentBuilder<?> documentBuilder = new DocumentBuilder(
+ final DocumentBuilderIndexedEntity<?> documentBuilder = new DocumentBuilderIndexedEntity(
mappedXClass, context, providers.getProviders(), providers.getSelectionStrategy(),
reflectionManager
);
indexHierarchy.addIndexedClass( mappedClass );
- documentBuilders.put( mappedClass, documentBuilder );
+ documentBuildersIndexedEntities.put( mappedClass, documentBuilder );
}
else {
- //FIXME DocumentBuilder needs to be built by a helper method receiving Class<T> to infer T properly
+ //FIXME DocumentBuilderIndexedEntity needs to be built by a helper method receiving Class<T> to infer T properly
//XClass unfortunately is not (yet) genericized: TODO?
- final DocumentBuilder<?> documentBuilder = new DocumentBuilder(
+ final DocumentBuilderContainedEntity<?> documentBuilder = new DocumentBuilderContainedEntity(
mappedXClass, context, reflectionManager
);
//TODO enhance that, I don't like to expose EntityState
if ( documentBuilder.getEntityState() != EntityState.NON_INDEXABLE ) {
- containedInOnlyBuilders.put( mappedClass, documentBuilder );
+ documentBuildersContainedEntities.put( mappedClass, documentBuilder );
}
}
bindFilterDefs( mappedXClass );
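
With the single documentBuilders map now split in two, code outside the factory has to be explicit about which family of builders it wants. A small sketch of the distinction as exposed through SearchFactoryImplementor in this revision; the helper class and its method names are illustrative assumptions, not part of this patch:

import org.hibernate.search.engine.SearchFactoryImplementor;

// Hypothetical helper, not part of this patch.
public final class BuilderLookupExample {
	public static boolean isIndexed(SearchFactoryImplementor factory, Class<?> clazz) {
		// @Indexed entities own a Lucene index and are served by getDocumentBuilderIndexedEntity().
		return factory.getDocumentBuilderIndexedEntity( clazz ) != null;
	}

	public static boolean isContainedInOnly(SearchFactoryImplementor factory, Class<?> clazz) {
		// @ContainedIn-only entities have no index of their own; their builder exists solely so that
		// updates to them trigger reindexing of the entities that embed them.
		return factory.getDocumentBuilderContainedEntity( clazz ) != null
				&& factory.getDocumentBuilderIndexedEntity( clazz ) == null;
	}
}
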
Modified: search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -41,7 +41,7 @@
import org.hibernate.search.FullTextFilter;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.SearchException;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.DocumentExtractor;
import org.hibernate.search.engine.EntityInfo;
import org.hibernate.search.engine.FilterDef;
@@ -547,10 +547,10 @@
//A query filter is more practical than a manual class filtering post query (esp on scrollable resultsets)
//it also probably minimise the memory footprint
BooleanQuery classFilter = new BooleanQuery();
- //annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
+ //annihilate the scoring impact of DocumentBuilderIndexedEntity.CLASS_FIELDNAME
classFilter.setBoost( 0 );
for ( Class clazz : classesAndSubclasses ) {
- Term t = new Term( DocumentBuilder.CLASS_FIELDNAME, clazz.getName() );
+ Term t = new Term( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, clazz.getName() );
TermQuery termQuery = new TermQuery( t );
classFilter.add( termQuery, BooleanClause.Occur.SHOULD );
}
@@ -587,7 +587,7 @@
* TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
*/
private IndexSearcher buildSearcher(SearchFactoryImplementor searchFactoryImplementor) {
- Map<Class<?>, DocumentBuilder<?>> builders = searchFactoryImplementor.getDocumentBuilders();
+ Map<Class<?>, DocumentBuilderIndexedEntity<?>> builders = searchFactoryImplementor.getDocumentBuilders();
List<DirectoryProvider> directories = new ArrayList<DirectoryProvider>();
Set<String> idFieldNames = new HashSet<String>();
@@ -602,7 +602,7 @@
);
}
- for ( DocumentBuilder builder : builders.values() ) {
+ for ( DocumentBuilderIndexedEntity builder : builders.values() ) {
searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
if ( builder.getIdKeywordName() != null ) {
idFieldNames.add( builder.getIdKeywordName() );
@@ -618,14 +618,14 @@
Set<Class<?>> involvedClasses = new HashSet<Class<?>>( targetedEntities.size() );
involvedClasses.addAll( targetedEntities );
for ( Class<?> clazz : targetedEntities ) {
- DocumentBuilder<?> builder = builders.get( clazz );
+ DocumentBuilderIndexedEntity<?> builder = builders.get( clazz );
if ( builder != null ) {
involvedClasses.addAll( builder.getMappedSubclasses() );
}
}
for ( Class clazz : involvedClasses ) {
- DocumentBuilder builder = builders.get( clazz );
+ DocumentBuilderIndexedEntity builder = builders.get( clazz );
//TODO should we rather choose a polymorphic path and allow non mapped entities
if ( builder == null ) {
throw new HibernateException( "Not a mapped entity (don't forget to add @Indexed): " + clazz );
@@ -685,7 +685,7 @@
}
}
- private Similarity checkSimilarity(Similarity similarity, DocumentBuilder builder) {
+ private Similarity checkSimilarity(Similarity similarity, DocumentBuilderIndexedEntity builder) {
if ( similarity == null ) {
similarity = builder.getSimilarity();
}
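
The only builder-type dependency FullTextQueryImpl keeps for filtering is the CLASS_FIELDNAME constant, now read from DocumentBuilderIndexedEntity. A standalone sketch of the zero-boost class restriction assembled above; the wrapping helper class is an illustrative assumption, not part of this patch:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.engine.DocumentBuilderIndexedEntity;

// Hypothetical standalone version of the class restriction built above; not part of this patch.
public final class ClassFilterExample {
	public static BooleanQuery buildClassFilter(Iterable<Class<?>> classesAndSubclasses) {
		BooleanQuery classFilter = new BooleanQuery();
		// boost 0 keeps the class restriction from influencing document scores
		classFilter.setBoost( 0 );
		for ( Class<?> clazz : classesAndSubclasses ) {
			Term term = new Term( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, clazz.getName() );
			classFilter.add( new TermQuery( term ), BooleanClause.Occur.SHOULD );
		}
		return classFilter;
	}
}
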
Deleted: search/trunk/src/java/org/hibernate/search/util/BinderHelper.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/util/BinderHelper.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/java/org/hibernate/search/util/BinderHelper.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -1,23 +0,0 @@
-//$Id$
-package org.hibernate.search.util;
-
-import org.hibernate.annotations.common.reflection.XMember;
-import org.hibernate.util.StringHelper;
-
-/**
- * @author Emmanuel Bernard
- */
-public abstract class BinderHelper {
-
- private BinderHelper() {
- }
-
- /**
- * Get attribute name out of member unless overriden by <code>name</code>
- */
- public static String getAttributeName(XMember member, String name) {
- return StringHelper.isNotEmpty( name ) ?
- name :
- member.getName(); //explicit field name
- }
-}
Copied: search/trunk/src/java/org/hibernate/search/util/ReflectionHelper.java (from rev 15603, search/trunk/src/java/org/hibernate/search/util/BinderHelper.java)
===================================================================
--- search/trunk/src/java/org/hibernate/search/util/ReflectionHelper.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/util/ReflectionHelper.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -0,0 +1,48 @@
+//$Id$
+package org.hibernate.search.util;
+
+import java.lang.reflect.Modifier;
+
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public abstract class ReflectionHelper {
+
+ private ReflectionHelper() {
+ }
+
+ /**
+ * Get attribute name out of member unless overriden by <code>name</code>.
+ *
+ * @param member <code>XMember</code> from which to extract the name.
+ * @param name Override value which will be returned in case it is not empty.
+ *
+ * @return attribute name out of member unless overriden by <code>name</code>.
+ */
+ public static String getAttributeName(XMember member, String name) {
+ return StringHelper.isNotEmpty( name ) ?
+ name :
+ member.getName(); //explicit field name
+ }
+
+ public static void setAccessible(XMember member) {
+ if ( !Modifier.isPublic( member.getModifiers() ) ) {
+ member.setAccessible( true );
+ }
+ }
+
+ public static Object getMemberValue(Object bean, XMember getter) {
+ Object value;
+ try {
+ value = getter.invoke( bean );
+ }
+ catch ( Exception e ) {
+ throw new IllegalStateException( "Could not get property value", e );
+ }
+ return value;
+ }
+}
Property changes on: search/trunk/src/java/org/hibernate/search/util/ReflectionHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
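
ReflectionHelper carries over BinderHelper.getAttributeName() unchanged and adds the two reflection utilities shared by the new document builders. A minimal usage sketch, assuming the XMember comes from Hibernate's reflection manager as it does inside the builders; the helper class below is illustrative, not part of this patch:

import org.hibernate.annotations.common.reflection.XMember;
import org.hibernate.search.util.ReflectionHelper;

// Hypothetical caller-side sketch, not part of this patch.
public final class ReflectionHelperExample {
	public static String resolveFieldName(XMember member, String nameFromAnnotation) {
		// explicit name from the mapping wins; otherwise fall back to the property name
		return ReflectionHelper.getAttributeName( member, nameFromAnnotation );
	}

	public static Object readProperty(Object entity, XMember getter) {
		// non-public members must be opened up before they can be invoked
		ReflectionHelper.setAccessible( getter );
		return ReflectionHelper.getMemberValue( entity, getter );
	}
}
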
Modified: search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -56,7 +56,7 @@
}
public void testSelectionOfShardingStrategy() throws Exception {
- IndexShardingStrategy shardingStrategy = getSearchFactory().getDocumentBuilder( Document.class )
+ IndexShardingStrategy shardingStrategy = getSearchFactory().getDocumentBuilderIndexedEntity( Document.class )
.getDirectoryProviderSelectionStrategy();
assertNotNull( shardingStrategy );
assertEquals( shardingStrategy.getClass(), UselessShardingStrategy.class );
Modified: search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -23,7 +23,7 @@
import org.hibernate.search.Environment;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.impl.jms.JMSBackendQueueProcessorFactory;
@@ -54,7 +54,7 @@
s.close();
//create the work queue to send
Document doc = new Document();
- Field field = new Field( DocumentBuilder.CLASS_FIELDNAME, ts.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
+ Field field = new Field( DocumentBuilderIndexedEntity.CLASS_FIELDNAME, ts.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
doc.add( field );
field = new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED );
doc.add( field );
Modified: search/trunk/src/test/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java 2008-11-21 13:58:05 UTC (rev 15604)
+++ search/trunk/src/test/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java 2008-11-23 10:48:59 UTC (rev 15605)
@@ -14,12 +14,11 @@
import org.hibernate.Transaction;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
-import org.hibernate.search.SearchFactory;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.DeleteLuceneWork;
-import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.reader.ReaderProvider;
import org.hibernate.search.store.DirectoryProvider;
@@ -104,7 +103,7 @@
public void testAddWorkGetReplacedByDeleteWork() throws Exception {
FullTextSession fullTextSession = org.hibernate.search.Search.getFullTextSession( openSession() );
SearchFactoryImpl searchFactory = ( SearchFactoryImpl ) fullTextSession.getSearchFactory();
- DocumentBuilder builder = searchFactory.getDocumentBuilder( SpecialPerson.class );
+ DocumentBuilderIndexedEntity builder = searchFactory.getDocumentBuilderIndexedEntity( SpecialPerson.class );
// create test entity
SpecialPerson person = new SpecialPerson();