Author: epbernard
Date: 2006-10-11 16:11:02 -0400 (Wed, 11 Oct 2006)
New Revision: 10569
Added:
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/LuceneQueryTest.java
Removed:
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/inheritance/InheritanceTest.java
Log:
ANN-387 Query filtered by classes now includes subclasses
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java 2006-10-11
12:51:37 UTC (rev 10568)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -8,7 +8,10 @@
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
@@ -16,193 +19,207 @@
import org.apache.lucene.index.Term;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
-import org.hibernate.util.ReflectHelper;
import org.hibernate.cfg.annotations.Version;
+import org.hibernate.lucene.bridge.BridgeFactory;
+import org.hibernate.lucene.bridge.FieldBridge;
+import org.hibernate.lucene.event.LuceneEventListener;
import org.hibernate.lucene.store.DirectoryProvider;
-import org.hibernate.lucene.bridge.FieldBridge;
-import org.hibernate.lucene.bridge.BridgeFactory;
import org.hibernate.lucene.util.BinderHelper;
-import org.hibernate.lucene.event.LuceneEventListener;
+import org.hibernate.util.ReflectHelper;
//TODO handle attribute (only getters are handled currently)
public class DocumentBuilder<T> {
- static {
- Version.touch(); //touch version
- }
+ static {
+ Version.touch(); //touch version
+ }
- private final List<Member> keywordGetters = new ArrayList<Member>();
- private final List<String> keywordNames = new ArrayList<String>();
- private final List<Member> unstoredGetters = new ArrayList<Member>();
- private final List<String> unstoredNames = new ArrayList<String>();
- private final List<Member> textGetters = new ArrayList<Member>();
- private final List<String> textNames = new ArrayList<String>();
+ private final List<Member> keywordGetters = new ArrayList<Member>();
+ private final List<String> keywordNames = new ArrayList<String>();
+ private final List<Member> unstoredGetters = new ArrayList<Member>();
+ private final List<String> unstoredNames = new ArrayList<String>();
+ private final List<Member> textGetters = new ArrayList<Member>();
+ private final List<String> textNames = new ArrayList<String>();
- //private final Class<T> beanClass;
- private final DirectoryProvider directoryProvider;
- private String idKeywordName;
- private final Analyzer analyzer;
- private Float idBoost;
- public static final String CLASS_FIELDNAME = "_hibernate_class";
- private FieldBridge idBridge;
+ private final Class<T> beanClass;
+ private final DirectoryProvider directoryProvider;
+ private String idKeywordName;
+ private final Analyzer analyzer;
+ private Float idBoost;
+ public static final String CLASS_FIELDNAME = "_hibernate_class";
+ private FieldBridge idBridge;
+ private Set<Class> mappedSubclasses = new HashSet<Class>();
- public DocumentBuilder(Class<?> clazz, Analyzer analyzer, DirectoryProvider
directory) {
- //this.beanClass = clazz;
- this.analyzer = analyzer;
- this.directoryProvider = directory;
+ public DocumentBuilder(Class<T> clazz, Analyzer analyzer, DirectoryProvider
directory) {
+ this.beanClass = clazz;
+ this.analyzer = analyzer;
+ this.directoryProvider = directory;
- for ( Class currClass = clazz; currClass != null ; currClass =
currClass.getSuperclass() ) {
- Method[] methods = currClass.getDeclaredMethods();
- for ( int i = 0; i < methods.length ; i++ ) {
- Method method = methods[i];
- Keyword keywordAnn = method.getAnnotation( Keyword.class );
- if ( keywordAnn != null ) {
- String name = BinderHelper.getAttributeName( method, keywordAnn.name() );
- if ( keywordAnn.id() ) {
- idKeywordName = name;
- idBoost = getBoost( method );
- idBridge = BridgeFactory.guessType( method );
- }
- else {
- setAccessible( method );
- keywordGetters.add( method );
- keywordNames.add( name );
- }
- }
- Unstored unstoredAnn = method.getAnnotation( Unstored.class );
- if ( unstoredAnn != null ) {
- setAccessible( method );
- unstoredGetters.add( method );
- unstoredNames.add( BinderHelper.getAttributeName( method, unstoredAnn.name() ) );
- }
- Text textAnn = method.getAnnotation( Text.class );
- if ( textAnn != null ) {
- textGetters.add( method );
- textNames.add( BinderHelper.getAttributeName( method, textAnn.name() ) );
- }
- }
- }
+ for (Class currClass = clazz; currClass != null; currClass =
currClass.getSuperclass()) {
+ Method[] methods = currClass.getDeclaredMethods();
+ for (int i = 0; i < methods.length; i++) {
+ Method method = methods[i];
+ Keyword keywordAnn = method.getAnnotation(Keyword.class);
+ if (keywordAnn != null) {
+ String name = BinderHelper.getAttributeName(method,
keywordAnn.name());
+ if (keywordAnn.id()) {
+ idKeywordName = name;
+ idBoost = getBoost(method);
+ idBridge = BridgeFactory.guessType(method);
+ } else {
+ setAccessible(method);
+ keywordGetters.add(method);
+ keywordNames.add(name);
+ }
+ }
+ Unstored unstoredAnn = method.getAnnotation(Unstored.class);
+ if (unstoredAnn != null) {
+ setAccessible(method);
+ unstoredGetters.add(method);
+ unstoredNames.add(BinderHelper.getAttributeName(method,
unstoredAnn.name()));
+ }
+ Text textAnn = method.getAnnotation(Text.class);
+ if (textAnn != null) {
+ textGetters.add(method);
+ textNames.add(BinderHelper.getAttributeName(method,
textAnn.name()));
+ }
+ }
+ }
- if ( idKeywordName == null ) {
- throw new HibernateException( "No id Keyword for: " + clazz.getName() );
- }
- }
+ if (idKeywordName == null) {
+ throw new HibernateException("No id Keyword for: " +
clazz.getName());
+ }
+ }
- private Float getBoost(AnnotatedElement element) {
- if (element == null) return null;
- Boost boost = element.getAnnotation( Boost.class );
- return boost != null ? Float.valueOf( boost.value() ) : null;
- }
- private Object getValue(Member member, T bean) {
- try {
- if ( member instanceof java.lang.reflect.Field ) {
- return ( (java.lang.reflect.Field) member ).get( bean );
- }
- else if ( member instanceof Method ) {
- return ( (Method) member ).invoke( bean );
- }
- else {
- throw new AssertionFailure( "Unexpected member: " +
member.getClass().getName() );
- }
- }
- catch (Exception e) {
- throw new IllegalStateException( "Could not get property value", e );
- }
- }
+ private Float getBoost(AnnotatedElement element) {
+ if (element == null) return null;
+ Boost boost = element.getAnnotation(Boost.class);
+ return boost != null ? Float.valueOf(boost.value()) : null;
+ }
- public Document getDocument(T instance, Serializable id) {
- Document doc = new Document();
- Float boost = getBoost( instance.getClass() );
- if (boost != null) {
- doc.setBoost( boost.floatValue() );
- }
- {
- Field classField = new Field( CLASS_FIELDNAME, instance.getClass().getName(),
Field.Store.YES, Field.Index.NO);
- doc.add( classField );
- idBridge.set( idKeywordName, id, doc, Field.Store.YES, Field.Index.UN_TOKENIZED,
idBoost );
- }
- for ( int i = 0; i < keywordNames.size() ; i++ ) {
- Member member = keywordGetters.get( i );
- Object value = getValue( member, instance );
- if ( value != null ) {
- Field field = new Field( keywordNames.get( i ), toString( value ), Field.Store.YES,
Field.Index.UN_TOKENIZED );
- boostField(field, member);
- doc.add( field );
- }
- }
- for ( int i = 0; i < textNames.size() ; i++ ) {
- Member member = textGetters.get( i );
- Object value = getValue( member, instance );
- if ( value != null ) {
- Field field = new Field( textNames.get( i ), toString( value ), Field.Store.YES,
Field.Index.TOKENIZED );
- boostField(field, member);
- doc.add( field );
- }
- }
- for ( int i = 0; i < unstoredNames.size() ; i++ ) {
- Member member = unstoredGetters.get( i );
- Object value = getValue( member, instance );
- if ( value != null ) {
- Field field = new Field( unstoredNames.get( i ), toString( value ), Field.Store.NO,
Field.Index.TOKENIZED );
- boostField(field, member);
- doc.add( field );
- }
- }
+ private Object getValue(Member member, T bean) {
+ try {
+ if (member instanceof java.lang.reflect.Field) {
+ return ((java.lang.reflect.Field) member).get(bean);
+ } else if (member instanceof Method) {
+ return ((Method) member).invoke(bean);
+ } else {
+ throw new AssertionFailure("Unexpected member: " +
member.getClass().getName());
+ }
+ }
+ catch (Exception e) {
+ throw new IllegalStateException("Could not get property value",
e);
+ }
+ }
- return doc;
- }
+ public Document getDocument(T instance, Serializable id) {
+ Document doc = new Document();
+ Float boost = getBoost(instance.getClass());
+ if (boost != null) {
+ doc.setBoost(boost.floatValue());
+ }
+ {
+ Field classField = new Field(CLASS_FIELDNAME, instance.getClass().getName(),
Field.Store.YES, Field.Index.NO);
+ doc.add(classField);
+ idBridge.set(idKeywordName, id, doc, Field.Store.YES,
Field.Index.UN_TOKENIZED, idBoost);
+ }
+ for (int i = 0; i < keywordNames.size(); i++) {
+ Member member = keywordGetters.get(i);
+ Object value = getValue(member, instance);
+ if (value != null) {
+ Field field = new Field(keywordNames.get(i), toString(value),
Field.Store.YES, Field.Index.UN_TOKENIZED);
+ boostField(field, member);
+ doc.add(field);
+ }
+ }
+ for (int i = 0; i < textNames.size(); i++) {
+ Member member = textGetters.get(i);
+ Object value = getValue(member, instance);
+ if (value != null) {
+ Field field = new Field(textNames.get(i), toString(value),
Field.Store.YES, Field.Index.TOKENIZED);
+ boostField(field, member);
+ doc.add(field);
+ }
+ }
+ for (int i = 0; i < unstoredNames.size(); i++) {
+ Member member = unstoredGetters.get(i);
+ Object value = getValue(member, instance);
+ if (value != null) {
+ Field field = new Field(unstoredNames.get(i), toString(value),
Field.Store.NO, Field.Index.TOKENIZED);
+ boostField(field, member);
+ doc.add(field);
+ }
+ }
- private void boostField(Field field, Member member) {
- Float boost = getBoost( (AnnotatedElement) member );
- if (boost != null) field.setBoost( boost.floatValue() );
- }
+ return doc;
+ }
- private static String toString(Object value) {
- return value.toString();
- }
+ private void boostField(Field field, Member member) {
+ Float boost = getBoost((AnnotatedElement) member);
+ if (boost != null) field.setBoost(boost.floatValue());
+ }
- public Term getTerm(Serializable id) {
- return new Term( idKeywordName, id.toString() );
- }
+ private static String toString(Object value) {
+ return value.toString();
+ }
- public DirectoryProvider getDirectoryProvider() {
- return directoryProvider;
- }
+ public Term getTerm(Serializable id) {
+ return new Term(idKeywordName, id.toString());
+ }
- public Analyzer getAnalyzer() {
- return analyzer;
- }
+ public DirectoryProvider getDirectoryProvider() {
+ return directoryProvider;
+ }
- private static void setAccessible(Member member) {
- if ( !Modifier.isPublic( member.getModifiers() ) ) {
- ( (AccessibleObject) member ).setAccessible( true );
- }
- }
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
- public FieldBridge getIdBridge() {
- return idBridge;
- }
+ private static void setAccessible(Member member) {
+ if (!Modifier.isPublic(member.getModifiers())) {
+ ((AccessibleObject) member).setAccessible(true);
+ }
+ }
- public String getIdKeywordName() {
- return idKeywordName;
- }
+ public FieldBridge getIdBridge() {
+ return idBridge;
+ }
- public static Class getDocumentClass(Document document) {
- String className = document.get( DocumentBuilder.CLASS_FIELDNAME );
- try {
- return ReflectHelper.classForName( className );
- }
- catch (ClassNotFoundException e) {
- throw new HibernateException("Unable to load indexed class: " + className,
e);
- }
- }
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
- public static Serializable getDocumentId(LuceneEventListener listener, Class clazz,
Document document) {
- DocumentBuilder builder = listener.getDocumentBuilders().get( clazz );
- if (builder == null) throw new HibernateException("No Lucene configuration set up
for: " + clazz.getName() );
- Serializable id = (Serializable) builder.getIdBridge().get( builder.getIdKeywordName(),
document );
- return id;
- }
+ public static Class getDocumentClass(Document document) {
+ String className = document.get(DocumentBuilder.CLASS_FIELDNAME);
+ try {
+ return ReflectHelper.classForName(className);
+ }
+ catch (ClassNotFoundException e) {
+ throw new HibernateException("Unable to load indexed class: " +
className, e);
+ }
+ }
+ public static Serializable getDocumentId(LuceneEventListener listener, Class clazz,
Document document) {
+ DocumentBuilder builder = listener.getDocumentBuilders().get(clazz);
+ if (builder == null) throw new HibernateException("No Lucene configuration
set up for: " + clazz.getName());
+ Serializable id = (Serializable)
builder.getIdBridge().get(builder.getIdKeywordName(), document);
+ return id;
+ }
+
+ public void postInitialize(Set<Class> indexedClasses) {
+ //this method does not require synchronization
+
+ Set<Class> tempMappedSubclasses = new HashSet<Class>();
+ //together with the caller this creates O(2n) work, but I think it's still faster
than building the superclass hierarchy for each class
+ for (Class currentClass : indexedClasses) {
+ if (beanClass.isAssignableFrom(currentClass))
tempMappedSubclasses.add(currentClass);
+ }
+ mappedSubclasses = Collections.unmodifiableSet(tempMappedSubclasses);
+ }
+
+
+ public Set<Class> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
}
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java 2006-10-11
12:51:37 UTC (rev 10568)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -6,6 +6,7 @@
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
@@ -18,7 +19,6 @@
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.hibernate.HibernateException;
-import org.hibernate.util.ReflectHelper;
import org.hibernate.cfg.Configuration;
import org.hibernate.event.Initializable;
import org.hibernate.event.PostDeleteEvent;
@@ -33,6 +33,7 @@
import org.hibernate.lucene.store.DirectoryProvider;
import org.hibernate.lucene.store.DirectoryProviderFactory;
import org.hibernate.mapping.PersistentClass;
+import org.hibernate.util.ReflectHelper;
/**
* This listener supports setting a parent directory for all generated index files.
@@ -44,142 +45,145 @@
*/
//TODO work on sharing the same indexWriters and readers across a single post
operation...
public class LuceneEventListener implements PostDeleteEventListener,
PostInsertEventListener,
- PostUpdateEventListener, Initializable {
+ PostUpdateEventListener, Initializable {
- //FIXME keeping this here is a bad decision since you might want to search indexes wo
maintain it
- @Deprecated
- public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
- return documentBuilders;
- }
+ //FIXME keeping this here is a bad decision since you might want to search indexes wo
maintain it
+ @Deprecated
+ public Map<Class, DocumentBuilder<Object>> getDocumentBuilders() {
+ return documentBuilders;
+ }
- private Map<Class, DocumentBuilder<Object>> documentBuilders = new
HashMap<Class, DocumentBuilder<Object>>();
- //keep track of the index modifiers per DirectoryProvider since multiple entity can use
the same directory provider
- private Map<DirectoryProvider, Lock> indexLock = new HashMap<DirectoryProvider,
Lock>();
- private boolean initialized;
+ private Map<Class, DocumentBuilder<Object>> documentBuilders = new
HashMap<Class, DocumentBuilder<Object>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entity can
use the same directory provider
+ private Map<DirectoryProvider, Lock> indexLock = new
HashMap<DirectoryProvider, Lock>();
+ private boolean initialized;
- private static final Log log = LogFactory.getLog( LuceneEventListener.class );
+ private static final Log log = LogFactory.getLog(LuceneEventListener.class);
- public void initialize(Configuration cfg) {
- if ( initialized ) return;
+ public void initialize(Configuration cfg) {
+ if (initialized) return;
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
- if ( analyzerClassName != null ) {
- try {
- analyzerClass = ReflectHelper.classForName( analyzerClassName );
- }
- catch (Exception e) {
- throw new HibernateException(
- "Lucene analyzer class '" + analyzerClassName + "' defined
in property '" + Environment.ANALYZER_CLASS + "' could not be
found.",
- e
- );
- }
- }
- else {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer analyzer;
- try {
- analyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new HibernateException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ":
" + analyzerClassName
- );
- }
- catch (Exception e) {
- throw new HibernateException( "Failed to instantiate lucene analyzer with type
" + analyzerClassName );
- }
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty(Environment.ANALYZER_CLASS);
+ if (analyzerClassName != null) {
+ try {
+ analyzerClass = ReflectHelper.classForName(analyzerClassName);
+ }
+ catch (Exception e) {
+ throw new HibernateException(
+ "Lucene analyzer class '" + analyzerClassName +
"' defined in property '" + Environment.ANALYZER_CLASS + "'
could not be found.",
+ e
+ );
+ }
+ } else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer analyzer;
+ try {
+ analyzer = (Analyzer) analyzerClass.newInstance();
+ }
+ catch (ClassCastException e) {
+ throw new HibernateException(
+ "Lucene analyzer does not implement " +
Analyzer.class.getName() + ": " + analyzerClassName
+ );
+ }
+ catch (Exception e) {
+ throw new HibernateException("Failed to instantiate lucene analyzer with
type " + analyzerClassName);
+ }
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
- while ( iter.hasNext() ) {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if ( mappedClass != null ) {
- if ( mappedClass.isAnnotationPresent( Indexed.class ) ) {
- DirectoryProvider provider = factory.createDirectoryProvider( mappedClass, cfg );
- final DocumentBuilder<Object> documentBuilder = new
DocumentBuilder<Object>(
- mappedClass, analyzer, provider
- );
- if ( ! indexLock.containsKey( provider ) ) {
- indexLock.put( provider, new ReentrantLock() );
- }
- documentBuilders.put( mappedClass, documentBuilder );
- }
- }
- }
- initialized = true;
- }
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while (iter.hasNext()) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if (mappedClass != null) {
+ if (mappedClass.isAnnotationPresent(Indexed.class)) {
+ DirectoryProvider provider =
factory.createDirectoryProvider(mappedClass, cfg);
+ final DocumentBuilder<Object> documentBuilder = new
DocumentBuilder<Object>(
+ (Class<Object>) mappedClass, analyzer, provider
+ );
+ if (!indexLock.containsKey(provider)) {
+ indexLock.put(provider, new ReentrantLock());
+ }
+ documentBuilders.put(mappedClass, documentBuilder);
+ }
+ }
+ }
+ Set<Class> indexedClasses = documentBuilders.keySet();
+ for (DocumentBuilder builder : documentBuilders.values()) {
+ builder.postInitialize(indexedClasses);
+ }
+ initialized = true;
+ }
- public void onPostDelete(PostDeleteEvent event) {
- DocumentBuilder builder = documentBuilders.get( event.getEntity().getClass() );
- if ( builder != null ) {
- remove( builder, event.getId() );
- }
- }
+ public void onPostDelete(PostDeleteEvent event) {
+ DocumentBuilder builder = documentBuilders.get(event.getEntity().getClass());
+ if (builder != null) {
+ remove(builder, event.getId());
+ }
+ }
- public void onPostInsert(PostInsertEvent event) {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
- if ( builder != null ) {
- add( entity, builder, event.getId() );
- }
- }
+ public void onPostInsert(PostInsertEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get(entity.getClass());
+ if (builder != null) {
+ add(entity, builder, event.getId());
+ }
+ }
- public void onPostUpdate(PostUpdateEvent event) {
- final Object entity = event.getEntity();
- DocumentBuilder<Object> builder = documentBuilders.get( entity.getClass() );
- if ( builder != null ) {
- final Serializable id = event.getId();
- remove( builder, id );
- add( entity, builder, id );
- }
- }
+ public void onPostUpdate(PostUpdateEvent event) {
+ final Object entity = event.getEntity();
+ DocumentBuilder<Object> builder = documentBuilders.get(entity.getClass());
+ if (builder != null) {
+ final Serializable id = event.getId();
+ remove(builder, id);
+ add(entity, builder, id);
+ }
+ }
- private void remove(DocumentBuilder<?> builder, Serializable id) {
- Term term = builder.getTerm( id );
- log.debug( "removing: " + term );
- DirectoryProvider directoryProvider = builder.getDirectoryProvider();
- Lock lock = indexLock.get( directoryProvider );
- lock.lock();
- try {
+ private void remove(DocumentBuilder<?> builder, Serializable id) {
+ Term term = builder.getTerm(id);
+ log.debug("removing: " + term);
+ DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+ Lock lock = indexLock.get(directoryProvider);
+ lock.lock();
+ try {
- IndexReader reader = IndexReader.open( directoryProvider.getDirectory() );
- reader.deleteDocuments( term );
- reader.close();
- }
- catch (IOException ioe) {
- throw new HibernateException( ioe );
- }
- finally {
- lock.unlock();
- }
- }
+ IndexReader reader = IndexReader.open(directoryProvider.getDirectory());
+ reader.deleteDocuments(term);
+ reader.close();
+ }
+ catch (IOException ioe) {
+ throw new HibernateException(ioe);
+ }
+ finally {
+ lock.unlock();
+ }
+ }
- private void add(final Object entity, final DocumentBuilder<Object> builder, final
Serializable id) {
- Document doc = builder.getDocument( entity, id );
- if ( log.isDebugEnabled() ) {
- log.debug( "adding: " + doc );
- }
- DirectoryProvider directoryProvider = builder.getDirectoryProvider();
- Lock lock = indexLock.get( directoryProvider );
- lock.lock();
- try {
- IndexWriter writer = new IndexWriter(
- directoryProvider.getDirectory(), builder.getAnalyzer(), false
- ); //have been created at init time
- writer.addDocument( doc );
- writer.close();
- }
- catch (IOException ioe) {
- throw new HibernateException( ioe );
- }
- finally {
- lock.unlock();
- }
- }
+ private void add(final Object entity, final DocumentBuilder<Object> builder,
final Serializable id) {
+ Document doc = builder.getDocument(entity, id);
+ if (log.isDebugEnabled()) {
+ log.debug("adding: " + doc);
+ }
+ DirectoryProvider directoryProvider = builder.getDirectoryProvider();
+ Lock lock = indexLock.get(directoryProvider);
+ lock.lock();
+ try {
+ IndexWriter writer = new IndexWriter(
+ directoryProvider.getDirectory(), builder.getAnalyzer(), false
+ ); //have been created at init time
+ writer.addDocument(doc);
+ writer.close();
+ }
+ catch (IOException ioe) {
+ throw new HibernateException(ioe);
+ }
+ finally {
+ lock.unlock();
+ }
+ }
}
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java 2006-10-11
12:51:37 UTC (rev 10568)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/java/org/hibernate/lucene/query/LuceneQueryImpl.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -4,6 +4,7 @@
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -37,222 +38,237 @@
*/
//implements setParameter()
public class LuceneQueryImpl extends AbstractQueryImpl {
- private static final Log log = LogFactory.getLog( LuceneQueryImpl.class );
- private org.apache.lucene.search.Query luceneQuery;
- private Class[] classes;
- private Integer firstResult;
- private Integer maxResults;
- private int resultSize;
+ private static final Log log = LogFactory.getLog(LuceneQueryImpl.class);
+ private org.apache.lucene.search.Query luceneQuery;
+ private Class[] classes;
+ private Integer firstResult;
+ private Integer maxResults;
+ private int resultSize;
- /** classes must be immutable */
- public LuceneQueryImpl(org.apache.lucene.search.Query query, Class[] classes,
SessionImplementor session, ParameterMetadata parameterMetadata) {
- //TODO handle flushMode
- super( query.toString(), null, session, parameterMetadata );
- this.luceneQuery = query;
- this.classes = classes;
- }
+ /**
+ * classes must be immutable
+ */
+ public LuceneQueryImpl(org.apache.lucene.search.Query query, Class[] classes,
SessionImplementor session, ParameterMetadata parameterMetadata) {
+ //TODO handle flushMode
+ super(query.toString(), null, session, parameterMetadata);
+ this.luceneQuery = query;
+ this.classes = classes;
+ }
- /**
- * Return an interator on the results.
- * Retrieve the object one by one (initialize it during the next() operation)
- */
- public Iterator iterate() throws HibernateException {
- //implement an interator which keep the id/class for each hit and get the object on
demand
- //cause I can't keep the searcher and hence the hit opened. I dont have any hook to
know when the
- //user stop using it
- //scrollable is better in this area
+ /**
+     * Return an iterator on the results.
+ * Retrieve the object one by one (initialize it during the next() operation)
+ */
+ public Iterator iterate() throws HibernateException {
+         //implement an iterator which keeps the id/class for each hit and gets the object
on demand
+         //cause I can't keep the searcher and hence the hit opened. I don't have any
hook to know when the
+         //user stops using it
+ //scrollable is better in this area
- LuceneEventListener listener = getLuceneEventListener();
- //find the directories
- Searcher searcher = buildSearcher( listener );
- Hits hits;
- try {
- hits = searcher.search( luceneQuery );
- setResultSize(hits);
- int first = first();
- int max = max( first, hits );
- EntityInfo[] entityInfos = new EntityInfo[max - first + 1];
- for (int index = first ; index <= max ; index++ ) {
- Document document = hits.doc( index );
- EntityInfo entityInfo = new EntityInfo();
- entityInfo.clazz = DocumentBuilder.getDocumentClass( document );
- //FIXME should check that clazz match classes but this complexify a lot the
firstResult/maxResult
- entityInfo.id = DocumentBuilder.getDocumentId( listener, entityInfo.clazz, document
);
- entityInfos[ index - first ] = entityInfo;
- }
- return new IteratorImpl( entityInfos, (Session) this.session );
- }
- catch (IOException e) {
- throw new HibernateException("Unable to query Lucene index", e);
- }
- finally {
- if (searcher != null) try {
- searcher.close();
- }
- catch (IOException e) {
- log.warn( "Unable to properly close searcher during lucene query: " +
getQueryString(), e );
- }
- }
- }
+ LuceneEventListener listener = getLuceneEventListener();
+ //find the directories
+ Searcher searcher = buildSearcher(listener);
+ Hits hits;
+ try {
+ hits = searcher.search(luceneQuery);
+ setResultSize(hits);
+ int first = first();
+ int max = max(first, hits);
+ EntityInfo[] entityInfos = new EntityInfo[max - first + 1];
+ for (int index = first; index <= max; index++) {
+ Document document = hits.doc(index);
+ EntityInfo entityInfo = new EntityInfo();
+ entityInfo.clazz = DocumentBuilder.getDocumentClass(document);
+ //FIXME should check that clazz match classes but this complexify a lot
the firstResult/maxResult
+ entityInfo.id = DocumentBuilder.getDocumentId(listener, entityInfo.clazz,
document);
+ entityInfos[index - first] = entityInfo;
+ }
+ return new IteratorImpl(entityInfos, (Session) this.session);
+ }
+ catch (IOException e) {
+ throw new HibernateException("Unable to query Lucene index", e);
+ }
+ finally {
+ if (searcher != null) try {
+ searcher.close();
+ }
+ catch (IOException e) {
+ log.warn("Unable to properly close searcher during lucene query:
" + getQueryString(), e);
+ }
+ }
+ }
- public ScrollableResults scroll() throws HibernateException {
- //keep the searcher open until the resultset is closed
- LuceneEventListener listener = getLuceneEventListener();
- //find the directories
- Searcher searcher = buildSearcher( listener );
- Hits hits;
- try {
- hits = searcher.search( luceneQuery );
- setResultSize(hits);
- int first = first();
- int max = max( first, hits );
- return new ScrollableResultsImpl( searcher, hits, first, max, (Session) this.session,
listener );
- }
- catch (IOException e) {
- try {
- if ( searcher != null ) searcher.close();
- }
- catch (IOException ee) {
- //we have the initial issue already
- }
- throw new HibernateException("Unable to query Lucene index", e);
- }
- }
+ public ScrollableResults scroll() throws HibernateException {
+ //keep the searcher open until the resultset is closed
+ LuceneEventListener listener = getLuceneEventListener();
+ //find the directories
+ Searcher searcher = buildSearcher(listener);
+ Hits hits;
+ try {
+ hits = searcher.search(luceneQuery);
+ setResultSize(hits);
+ int first = first();
+ int max = max(first, hits);
+ return new ScrollableResultsImpl(searcher, hits, first, max, (Session)
this.session, listener);
+ }
+ catch (IOException e) {
+ try {
+ if (searcher != null) searcher.close();
+ }
+ catch (IOException ee) {
+ //we have the initial issue already
+ }
+ throw new HibernateException("Unable to query Lucene index", e);
+ }
+ }
- public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {
- //TODO think about this scrollmode
- return scroll();
- }
+ public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {
+ //TODO think about this scrollmode
+ return scroll();
+ }
- public List list() throws HibernateException {
- LuceneEventListener listener = getLuceneEventListener();
- //find the directories
- Searcher searcher = buildSearcher( listener );
- Hits hits;
- try {
- hits = searcher.search( luceneQuery );
- setResultSize(hits);
- int first = first();
- int max = max( first, hits );
- List result = new ArrayList( max - first + 1);
- Session sess = (Session) this.session;
- for (int index = first ; index <= max ; index++ ) {
- Document document = hits.doc( index );
- Class clazz = DocumentBuilder.getDocumentClass( document );
- //FIXME should check that clazz match classes but this complexify a lot the
firstResult/maxResult
- Serializable id = DocumentBuilder.getDocumentId( listener, clazz, document );
- result.add( sess.load( clazz, id ) );
- //use load to benefit from the batch-size (but facing some proxy casting issues...
- }
- //then initialize the objects
- for (Object element : result) {
- Hibernate.initialize(element);
- }
- return result;
- }
- catch (IOException e) {
- throw new HibernateException("Unable to query Lucene index", e);
- }
- finally {
- if (searcher != null) try {
- searcher.close();
- }
- catch (IOException e) {
- log.warn( "Unable to properly close searcher during lucene query: " +
getQueryString(), e );
- }
- }
- }
+ public List list() throws HibernateException {
+ LuceneEventListener listener = getLuceneEventListener();
+ //find the directories
+ Searcher searcher = buildSearcher(listener);
+ Hits hits;
+ try {
+ hits = searcher.search(luceneQuery);
+ setResultSize(hits);
+ int first = first();
+ int max = max(first, hits);
+ List result = new ArrayList(max - first + 1);
+ Session sess = (Session) this.session;
+ for (int index = first; index <= max; index++) {
+ Document document = hits.doc(index);
+ Class clazz = DocumentBuilder.getDocumentClass(document);
+ //FIXME should check that clazz match classes but this complexify a lot
the firstResult/maxResult
+ Serializable id = DocumentBuilder.getDocumentId(listener, clazz,
document);
+ result.add(sess.load(clazz, id));
+ //use load to benefit from the batch-size (but facing some proxy casting
issues...
+ }
+ //then initialize the objects
+ for (Object element : result) {
+ Hibernate.initialize(element);
+ }
+ return result;
+ }
+ catch (IOException e) {
+ throw new HibernateException("Unable to query Lucene index", e);
+ }
+ finally {
+ if (searcher != null) try {
+ searcher.close();
+ }
+ catch (IOException e) {
+ log.warn("Unable to properly close searcher during lucene query:
" + getQueryString(), e);
+ }
+ }
+ }
- private int max(int first, Hits hits) {
- return maxResults == null ?
- first + hits.length() - 1 :
- maxResults + first < hits.length() ?
- first + maxResults :
- hits.length() - 1;
- }
+ private int max(int first, Hits hits) {
+ return maxResults == null ?
+ first + hits.length() - 1 :
+ maxResults + first < hits.length() ?
+ first + maxResults :
+ hits.length() - 1;
+ }
- private int first() {
- return firstResult != null ? firstResult : 0;
- }
+ private int first() {
+ return firstResult != null ? firstResult : 0;
+ }
- private Searcher buildSearcher(LuceneEventListener listener) {
- Map<Class, DocumentBuilder<Object>> builders =
listener.getDocumentBuilders();
- Set<Directory> directories = new HashSet<Directory>();
- for (Class clazz : classes) {
- DocumentBuilder builder = builders.get(clazz);
- if (builder == null) throw new HibernateException( "Not a mapped entity: " +
clazz);
- directories.add( builder.getDirectoryProvider().getDirectory() );
- }
+ private Searcher buildSearcher(LuceneEventListener listener) {
+ Map<Class, DocumentBuilder<Object>> builders =
listener.getDocumentBuilders();
+ Set<Directory> directories = new HashSet<Directory>();
+ if (classes == null || classes.length == 0) {
+ //no class means all classes
+ for (DocumentBuilder builder : builders.values()) {
+ directories.add(builder.getDirectoryProvider().getDirectory());
+ }
+ } else {
+ Set<Class> involvedClasses = new HashSet<Class>(classes.length);
+ Collections.addAll(involvedClasses, classes);
+ for (Class clazz : classes) {
+ DocumentBuilder builder = builders.get(clazz);
+ if (builder != null)
involvedClasses.addAll(builder.getMappedSubclasses());
+ }
+ for (Class clazz : involvedClasses) {
+ DocumentBuilder builder = builders.get(clazz);
+ //TODO should we rather choose a polymorphic path and allow non mapped
entities
+ if (builder == null) throw new HibernateException("Not a mapped
entity: " + clazz);
+ directories.add(builder.getDirectoryProvider().getDirectory());
+ }
+ }
- //set up the searcher
- Searcher searcher;
- int dirNbr = directories.size();
- if (dirNbr > 1) {
- try {
- IndexSearcher[] searchers = new IndexSearcher[ dirNbr ];
- Iterator<Directory> it = directories.iterator();
- for (int index = 0 ; index < dirNbr ; index++) {
- searchers[index] = new IndexSearcher( it.next() );
- }
- searcher = new MultiSearcher(searchers);
- }
- catch(IOException e) {
- throw new HibernateException("Unable to read Lucene directory", e);
- }
- }
- else {
- try {
- searcher = new IndexSearcher( directories.iterator().next() );
- }
- catch (IOException e) {
- throw new HibernateException("Unable to read Lucene directory", e);
- }
- }
- return searcher;
- }
+ //set up the searcher
+ Searcher searcher;
+ int dirNbr = directories.size();
+ if (dirNbr > 1) {
+ try {
+ IndexSearcher[] searchers = new IndexSearcher[dirNbr];
+ Iterator<Directory> it = directories.iterator();
+ for (int index = 0; index < dirNbr; index++) {
+ searchers[index] = new IndexSearcher(it.next());
+ }
+ searcher = new MultiSearcher(searchers);
+ }
+ catch (IOException e) {
+ throw new HibernateException("Unable to read Lucene directory",
e);
+ }
+ } else {
+ try {
+ searcher = new IndexSearcher(directories.iterator().next());
+ }
+ catch (IOException e) {
+ throw new HibernateException("Unable to read Lucene directory",
e);
+ }
+ }
+ return searcher;
+ }
- private void setResultSize(Hits hits) {
- resultSize = hits.length();
- }
+ private void setResultSize(Hits hits) {
+ resultSize = hits.length();
+ }
- //FIXME does it make sense
- public int resultSize() {
- return this.resultSize;
- }
+ //FIXME does it make sense
+ public int resultSize() {
+ return this.resultSize;
+ }
- public Query setFirstResult(int firstResult) {
- this.firstResult = firstResult;
- return this;
- }
+ public Query setFirstResult(int firstResult) {
+ this.firstResult = firstResult;
+ return this;
+ }
- public Query setMaxResults(int maxResults) {
- this.maxResults = maxResults;
- return this;
- }
+ public Query setMaxResults(int maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
- private LuceneEventListener getLuceneEventListener() {
- PostInsertEventListener[] listeners =
session.getListeners().getPostCommitInsertEventListeners();
- LuceneEventListener listener = null;
- //FIXME this sucks since we mandante the event listener use
- for(PostInsertEventListener candidate : listeners ) {
- if (candidate instanceof LuceneEventListener) {
- listener = (LuceneEventListener) candidate;
- break;
- }
- }
- if (listener == null) throw new HibernateException("Lucene event listener not
initialized");
- return listener;
- }
+ private LuceneEventListener getLuceneEventListener() {
+ PostInsertEventListener[] listeners =
session.getListeners().getPostCommitInsertEventListeners();
+ LuceneEventListener listener = null;
+         //FIXME this sucks since we mandate the event listener use
+         for (PostInsertEventListener candidate : listeners) {
+ if (candidate instanceof LuceneEventListener) {
+ listener = (LuceneEventListener) candidate;
+ break;
+ }
+ }
+ if (listener == null) throw new HibernateException("Lucene event listener
not initialized");
+ return listener;
+ }
- public int executeUpdate() throws HibernateException {
- throw new HibernateException( "Not supported operation" );
- }
+ public int executeUpdate() throws HibernateException {
+ throw new HibernateException("Not supported operation");
+ }
- public Query setLockMode(String alias, LockMode lockMode) {
- return null;
- }
+ public Query setLockMode(String alias, LockMode lockMode) {
+ return null;
+ }
- protected Map getLockModes() {
- return null;
- }
+ protected Map getLockModes() {
+ return null;
+ }
}
Modified:
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/inheritance/InheritanceTest.java
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/inheritance/InheritanceTest.java 2006-10-11
12:51:37 UTC (rev 10568)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/inheritance/InheritanceTest.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -24,11 +24,11 @@
LuceneSession s = new LuceneSession( openSession() );
Transaction tx = s.beginTransaction();
Animal a = new Animal();
- a.setName("Shark");
+ a.setName("Shark Jr");
s.save( a );
Mammal m = new Mammal();
m.setMammalNbr(2);
- m.setName("Elephant");
+ m.setName("Elephant Jr");
s.save(m);
tx.commit();//post commit events for lucene
s.clear();
@@ -39,17 +39,25 @@
org.hibernate.Query hibQuery;
query = parser.parse( "Elephant" );
- hibQuery = s.createLuceneQuery( query, Animal.class, Mammal.class );
+ hibQuery = s.createLuceneQuery( query, Mammal.class );
List result = hibQuery.list();
assertNotNull( result );
- assertEquals( 1, result.size() );
+ assertEquals( "Query subclass by superclass attribute", 1, result.size() );
query = parser.parse( "mammalNbr:[2 TO 2]" );
hibQuery = s.createLuceneQuery( query, Animal.class, Mammal.class );
result = hibQuery.list();
assertNotNull( result );
- assertEquals( 1, result.size() );
+ assertEquals( "Query subclass by subclass attribute", 1, result.size() );
+ query = parser.parse( "Jr" );
+ hibQuery = s.createLuceneQuery( query, Animal.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query filtering on superclass return mapped subclasses", 2,
result.size() );
+ for (Object managedEntity : result) {
+ s.delete(managedEntity);
+ }
tx.commit();
s.close();
}
Copied:
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/LuceneQueryTest.java
(from rev 10565,
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java)
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java 2006-10-05
06:08:25 UTC (rev 10565)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/LuceneQueryTest.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -0,0 +1,148 @@
+//$Id: $
+package org.hibernate.lucene.test.query;
+
+import java.util.List;
+import java.util.Iterator;
+
+import org.hibernate.lucene.test.TestCase;
+import org.hibernate.lucene.LuceneSession;
+import org.hibernate.Transaction;
+import org.hibernate.Hibernate;
+import org.hibernate.ScrollableResults;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class LuceneQueryTest extends TestCase {
+
+ public void testList() throws Exception {
+ LuceneSession s = new LuceneSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
+ s.save(book);
+         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
+ s.save(book);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( 0, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter", 2, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createLuceneQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no class filter", 2, result.size() );
+ for (Object element : result) {
+ assertTrue( Hibernate.isInitialized( element ) );
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testIterator() throws Exception {
+ LuceneSession s = new LuceneSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
+ s.save(book);
+         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
+ s.save(book);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ Iterator result = hibQuery.iterate();
+ assertNotNull( result );
+ assertFalse( result.hasNext() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ result = hibQuery.iterate();
+ assertNotNull( result );
+ int index = 0;
+ while ( result.hasNext() ) {
+ index++;
+ s.delete( result.next() );
+ }
+ assertEquals( 2, index );
+ tx.commit();
+ s.close();
+ }
+
+ public void testScrollableResultSet() throws Exception {
+ LuceneSession s = new LuceneSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock(1, "Seiko");
+ s.save( clock );
+ clock = new Clock( 2, "Festina");
+ s.save( clock );
+ Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
+ s.save(book);
+         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
+ s.save(book);
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser("title", new StopAnalyzer() );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ ScrollableResults result = hibQuery.scroll();
+ assertNotNull( result );
+ assertEquals(-1, result.getRowNumber() );
+ assertEquals(false, result.next() );
+ result.close();
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
+ result = hibQuery.scroll();
+ assertEquals(0, result.getRowNumber() );
+ result.beforeFirst();
+ assertEquals( true, result.next() );
+ assertTrue( result.isFirst() );
+ assertTrue( result.scroll( 1 ) );
+ assertTrue( result.isLast() );
+ assertFalse( result.scroll( 1 ) );
+ result.beforeFirst();
+ while ( result.next() ) {
+ s.delete( result.get()[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Clock.class
+ };
+ }
+}
Deleted:
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java
===================================================================
---
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java 2006-10-11
12:51:37 UTC (rev 10568)
+++
branches/Lucene_Integration/HibernateExt/metadata/src/test/org/hibernate/lucene/test/query/QueryTest.java 2006-10-11
20:11:02 UTC (rev 10569)
@@ -1,142 +0,0 @@
-//$Id: $
-package org.hibernate.lucene.test.query;
-
-import java.util.List;
-import java.util.Iterator;
-
-import org.hibernate.lucene.test.TestCase;
-import org.hibernate.lucene.LuceneSession;
-import org.hibernate.Transaction;
-import org.hibernate.Hibernate;
-import org.hibernate.ScrollableResults;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.analysis.StopAnalyzer;
-
-
-/**
- * @author Emmanuel Bernard
- */
-public class QueryTest extends TestCase {
-
- public void testList() throws Exception {
- LuceneSession s = new LuceneSession( openSession() );
- Transaction tx = s.beginTransaction();
- Clock clock = new Clock(1, "Seiko");
- s.save( clock );
- clock = new Clock( 2, "Festina");
- s.save( clock );
- Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
- s.save(book);
-         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
- s.save(book);
- tx.commit();//post commit events for lucene
- s.clear();
- tx = s.beginTransaction();
- QueryParser parser = new QueryParser("title", new StopAnalyzer() );
-
- Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- List result = hibQuery.list();
- assertNotNull( result );
- assertEquals( 0, result.size() );
-
- query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- result = hibQuery.list();
- assertNotNull( result );
- assertEquals( 2, result.size() );
- for (Object element : result) {
- assertTrue( Hibernate.isInitialized( element ) );
- s.delete( element );
- }
- tx.commit();
- s.close();
- }
-
- public void testIterator() throws Exception {
- LuceneSession s = new LuceneSession( openSession() );
- Transaction tx = s.beginTransaction();
- Clock clock = new Clock(1, "Seiko");
- s.save( clock );
- clock = new Clock( 2, "Festina");
- s.save( clock );
- Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
- s.save(book);
-         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
- s.save(book);
- tx.commit();//post commit events for lucene
- s.clear();
- tx = s.beginTransaction();
- QueryParser parser = new QueryParser("title", new StopAnalyzer() );
-
- Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- Iterator result = hibQuery.iterate();
- assertNotNull( result );
- assertFalse( result.hasNext() );
-
- query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- result = hibQuery.iterate();
- assertNotNull( result );
- int index = 0;
- while ( result.hasNext() ) {
- index++;
- s.delete( result.next() );
- }
- assertEquals( 2, index );
- tx.commit();
- s.close();
- }
-
- public void testScrollableResultSet() throws Exception {
- LuceneSession s = new LuceneSession( openSession() );
- Transaction tx = s.beginTransaction();
- Clock clock = new Clock(1, "Seiko");
- s.save( clock );
- clock = new Clock( 2, "Festina");
- s.save( clock );
- Book book = new Book(1, "La chute de la petite reine a travers les yeux de
Festina", "La chute de la petite reine a travers les yeux de Festina,
blahblah");
- s.save(book);
-         book = new Book(2, "La gloire de mon père", "Les deboires de mon père en
vélo");
- s.save(book);
- tx.commit();//post commit events for lucene
- s.clear();
- tx = s.beginTransaction();
- QueryParser parser = new QueryParser("title", new StopAnalyzer() );
-
- Query query = parser.parse( "summary:noword" );
- org.hibernate.Query hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- ScrollableResults result = hibQuery.scroll();
- assertNotNull( result );
- assertEquals(-1, result.getRowNumber() );
- assertEquals(false, result.next() );
- result.close();
-
- query = parser.parse( "summary:Festina Or brand:Seiko" );
- hibQuery = s.createLuceneQuery( query, Clock.class, Book.class );
- result = hibQuery.scroll();
- assertEquals(0, result.getRowNumber() );
- result.beforeFirst();
- assertEquals( true, result.next() );
- assertTrue( result.isFirst() );
- assertTrue( result.scroll( 1 ) );
- assertTrue( result.isLast() );
- assertFalse( result.scroll( 1 ) );
- result.beforeFirst();
- while ( result.next() ) {
- s.delete( result.get()[0] );
- }
- tx.commit();
- s.close();
- }
-
-
- protected Class[] getMappings() {
- return new Class[] {
- Book.class,
- Clock.class
- };
- }
-}