Hibernate SVN: r12815 - in trunk/HibernateExt/search/src: java/org/hibernate/search/jpa and 3 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-07-25 15:32:50 -0400 (Wed, 25 Jul 2007)
New Revision: 12815
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/FullTextQuery.java
trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
Log:
HSEARCH-100 renaming to setProjection and mark the old name as deprecated
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-07-25 03:55:07 UTC (rev 12814)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-07-25 19:32:50 UTC (rev 12815)
@@ -66,6 +66,12 @@
* If the projected field is not a projectable field, null is returned in the object[]
*
*/
+ FullTextQuery setProjection(String... fields);
+
+ /**
+ * @deprecated Use #setProjection
+ *
+ */
FullTextQuery setIndexProjection(String... fields);
/**
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/FullTextQuery.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/FullTextQuery.java 2007-07-25 03:55:07 UTC (rev 12814)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/FullTextQuery.java 2007-07-25 19:32:50 UTC (rev 12815)
@@ -59,5 +59,5 @@
* If the projected field is not a projectable field, null is returned in the object[]
*
*/
- FullTextQuery setIndexProjection(String... fields);
+ FullTextQuery setProjection(String... fields);
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java 2007-07-25 03:55:07 UTC (rev 12814)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java 2007-07-25 19:32:50 UTC (rev 12815)
@@ -60,8 +60,8 @@
return this;
}
- public FullTextQuery setIndexProjection(String... fields) {
- query.setIndexProjection( fields );
+ public FullTextQuery setProjection(String... fields) {
+ query.setProjection( fields );
return this;
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-07-25 03:55:07 UTC (rev 12814)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-07-25 19:32:50 UTC (rev 12815)
@@ -480,7 +480,7 @@
return this;
}
- public FullTextQuery setIndexProjection(String... fields) {
+ public FullTextQuery setProjection(String... fields) {
if ( fields == null || fields.length == 0 ) {
this.indexProjection = null;
}
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2007-07-25 03:55:07 UTC (rev 12814)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2007-07-25 19:32:50 UTC (rev 12815)
@@ -37,7 +37,7 @@
Query query = parser.parse( "dept:ITech" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
// Is the 'FullTextQuery.ID' value correct here? Do we want the Lucene internal document number?
- hibQuery.setIndexProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
ScrollableResults projections = hibQuery.scroll();
@@ -128,7 +128,7 @@
Query query = parser.parse( "dept:ITech" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
- hibQuery.setIndexProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
int counter = 0;
@@ -162,7 +162,7 @@
Query query = parser.parse( "dept:Accounting" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
- hibQuery.setIndexProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
+ hibQuery.setProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.BOOST, FullTextQuery.DOCUMENT, FullTextQuery.ID );
List result = hibQuery.list();
assertNotNull( result );
@@ -180,7 +180,7 @@
assertEquals( "ID incorrect", 1001, projection[7] );
// Change the projection order and null one
- hibQuery.setIndexProjection( FullTextQuery.DOCUMENT, FullTextQuery.THIS, FullTextQuery.SCORE, null, FullTextQuery.ID, "id", "lastname", "dept" );
+ hibQuery.setProjection( FullTextQuery.DOCUMENT, FullTextQuery.THIS, FullTextQuery.SCORE, null, FullTextQuery.ID, "id", "lastname", "dept" );
result = hibQuery.list();
assertNotNull( result );
@@ -238,7 +238,7 @@
Query query = parser.parse( "summary:Festina" );
org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Book.class );
- hibQuery.setIndexProjection( "id", "summary", "mainAuthor.name" );
+ hibQuery.setProjection( "id", "summary", "mainAuthor.name" );
List result = hibQuery.list();
assertNotNull( result );
@@ -249,7 +249,7 @@
assertEquals( "mainAuthor.name (embedded objects)", "Emmanuel", projection[2] );
hibQuery = s.createFullTextQuery( query, Book.class );
- hibQuery.setIndexProjection( "id", "body", "mainAuthor.name" );
+ hibQuery.setProjection( "id", "body", "mainAuthor.name" );
try {
result = hibQuery.list();
@@ -261,14 +261,14 @@
hibQuery = s.createFullTextQuery( query, Book.class );
- hibQuery.setIndexProjection();
+ hibQuery.setProjection();
result = hibQuery.list();
assertNotNull( result );
assertEquals( 1, result.size() );
assertTrue( "Should not trigger projection", result.get( 0 ) instanceof Book );
hibQuery = s.createFullTextQuery( query, Book.class );
- hibQuery.setIndexProjection( null );
+ hibQuery.setProjection( null );
result = hibQuery.list();
assertNotNull( result );
assertEquals( 1, result.size() );
@@ -276,7 +276,7 @@
query = parser.parse( "summary:fleurs" );
hibQuery = s.createFullTextQuery( query, Book.class );
- hibQuery.setIndexProjection( "id", "summary", "mainAuthor.name" );
+ hibQuery.setProjection( "id", "summary", "mainAuthor.name" );
result = hibQuery.list();
assertEquals( 1, result.size() );
projection = (Object[]) result.get( 0 );
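A minimal usage sketch of the renamed API (illustrative only, not part of the commit; it assumes an open FullTextSession s and a parsed Lucene query, as in the test above):

    org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
    hibQuery.setProjection( "id", "lastname", FullTextQuery.SCORE ); // new name introduced by this revision
    // hibQuery.setIndexProjection( "id", "lastname", FullTextQuery.SCORE ); // deprecated alias, kept on the core interface only
    List results = hibQuery.list(); // each element is an Object[] matching the projected fields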
Hibernate SVN: r12814 - trunk/HibernateExt/validator/src/java/org/hibernate/validator/resources.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-07-24 23:55:07 -0400 (Tue, 24 Jul 2007)
New Revision: 12814
Added:
trunk/HibernateExt/validator/src/java/org/hibernate/validator/resources/DefaultValidatorMessages_ja.properties
Log:
HV-36 Japanese translation for Hibernate Validator
Added: trunk/HibernateExt/validator/src/java/org/hibernate/validator/resources/DefaultValidatorMessages_ja.properties
===================================================================
--- trunk/HibernateExt/validator/src/java/org/hibernate/validator/resources/DefaultValidatorMessages_ja.properties (rev 0)
+++ trunk/HibernateExt/validator/src/java/org/hibernate/validator/resources/DefaultValidatorMessages_ja.properties 2007-07-25 03:55:07 UTC (rev 12814)
@@ -0,0 +1,16 @@
+validator.assertFalse=\u30a2\u30b5\u30fc\u30b7\u30e7\u30f3\u306b\u5931\u6557\u3057\u307e\u3057\u305f
+validator.assertTrue=\u30a2\u30b5\u30fc\u30b7\u30e7\u30f3\u306b\u5931\u6557\u3057\u307e\u3057\u305f
+validator.future=\u672a\u6765\u306e\u65e5\u4ed8\u3092\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.length={min}\u6587\u5b57\u304b\u3089{max}\u6587\u5b57\u307e\u3067\u306e\u9577\u3055\u3067\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.max={value}\u4ee5\u4e0b\u3092\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.min={value}\u4ee5\u4e0a\u3092\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.notNull=null\u306e\u72b6\u614b\u306f\u8a31\u3055\u308c\u307e\u305b\u3093
+validator.past=\u904e\u53bb\u306e\u65e5\u4ed8\u3092\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.pattern="{regex}"\u306b\u30de\u30c3\u30c1\u3055\u305b\u3066\u304f\u3060\u3055\u3044
+validator.range={min}\u4ee5\u4e0a\u3001{max}\u4ee5\u4e0b\u3092\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.size={min}\u304b\u3089{max}\u307e\u3067\u306e\u30b5\u30a4\u30ba\u3067\u6307\u5b9a\u3057\u3066\u304f\u3060\u3055\u3044
+validator.email=\u96fb\u5b50\u30e1\u30fc\u30eb\u306e\u30a2\u30c9\u30ec\u30b9\u306e\u5f62\u5f0f\u3068\u3057\u3066\u6b63\u3057\u304f\u3042\u308a\u307e\u305b\u3093
+validator.notEmpty=null\u3084\u7a7a\u306e\u72b6\u614b\u306f\u8a31\u3055\u308c\u307e\u305b\u3093
+validator.digits=\u7bc4\u56f2\u5916\u306e\u6570\u5024\u3067\u3059(<{integerDigits}\u6841>.<{fractionalDigits}\u6841>\u307e\u3067\u306e\u6841\u6570\u304c\u8a31\u5bb9\u3055\u308c\u3066\u3044\u307e\u3059)
+validator.creditCard=\u7121\u52b9\u306a\u30af\u30ec\u30b8\u30c3\u30c8\u30ab\u30fc\u30c9\u756a\u53f7\u3067\u3059
+validator.ean=\u7121\u52b9\u306aJAN\u30b3\u30fc\u30c9\u3067\u3059
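The new bundle is picked up through the standard java.util.ResourceBundle fallback chain; a quick sketch (illustrative, not part of the commit) resolving one of the new Japanese messages, with the base name taken from the file path above:

    import java.util.Locale;
    import java.util.ResourceBundle;

    ResourceBundle bundle = ResourceBundle.getBundle(
            "org.hibernate.validator.resources.DefaultValidatorMessages", Locale.JAPANESE );
    String notNull = bundle.getString( "validator.notNull" ); // returns the Japanese "not null" message added above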
Hibernate SVN: r12813 - in trunk/HibernateExt/search/src: java/org/hibernate/search/annotations and 6 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2007-07-24 22:58:26 -0400 (Tue, 24 Jul 2007)
New Revision: 12813
Added:
trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextFilter.java
trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Factory.java
trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDef.java
trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDefs.java
trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Key.java
trunk/HibernateExt/search/src/java/org/hibernate/search/engine/FilterDef.java
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/ChainedFilter.java
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterCachingStrategy.java
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterKey.java
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java
trunk/HibernateExt/search/src/java/org/hibernate/search/filter/StandardFilterKey.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextFilterImpl.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/
trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/BestDriversFilter.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/Driver.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/FilterTest.java
trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/SecurityFilterFactory.java
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java
trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
Log:
HSEARCH-15 provide ftQuery.setFilter(Filter) (Hardy Ferentschik)
HSEARCH-58 Support named Filters (and caching)
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java 2007-07-24 20:35:47 UTC (rev 12812)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/Environment.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -56,4 +56,12 @@
* define the reader strategy used
*/
public static final String READER_STRATEGY = READER_PREFIX + "strategy";
+ /**
+ * filter caching strategy
+ */
+ public static final String FILTER_CACHING_STRATEGY_PREFIX = "hibernate.search.filter.cache_strategy.";
+ /**
+ * filter caching strategy class (must have a no-arg constructor and implements FilterCachingStrategy)
+ */
+ public static final String FILTER_CACHING_STRATEGY_IMPL = "hibernate.search.filter.cache_strategy.impl";
}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextFilter.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextFilter.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextFilter.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,13 @@
+//$Id$
+package org.hibernate.search;
+
+/**
+ * represents a FullTextFilter that is about to be applied
+ * Used to inject parameters
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextFilter {
+ FullTextFilter setParameter(String name, Object value);
+ Object getParameter(String name);
+}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-07-24 20:35:47 UTC (rev 12812)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/FullTextQuery.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -1,9 +1,10 @@
//$Id$
package org.hibernate.search;
+import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Sort;
+import org.hibernate.Criteria;
import org.hibernate.Query;
-import org.hibernate.Criteria;
/**
* The base interface for lucene powered searches.
@@ -24,6 +25,15 @@
* @return this for method chaining
*/
FullTextQuery setSort(Sort sort);
+
+ /**
+ * Allows to use lucene filters.
+ * Semi-deprecated? a preferred way is to use the @FullTextFilterDef approach
+ *
+ * @param filter The lucene filter.
+ * @return this for method chaining
+ */
+ FullTextQuery setFilter(Filter filter);
/**
* Returns the number of hits for this search
@@ -57,4 +67,15 @@
*
*/
FullTextQuery setIndexProjection(String... fields);
+
+ /**
+ * Enable a given filter by its name. Returns a FullTextFilter object that allows filter parameter injection
+ */
+ FullTextFilter enableFullTextFilter(String name);
+
+ /**
+ * Disable a given filter by its name
+ */
+ void disableFullTextFilter(String name);
+
}
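A short sketch of the filter API added to this interface (illustrative, not part of the commit; s, luceneQuery and someLuceneFilter are assumed, and the filter names come from the FilterTest added below):

    FullTextQuery q = s.createFullTextQuery( luceneQuery, Driver.class );
    q.enableFullTextFilter( "bestDriver" );                                // named filter, no parameters
    q.enableFullTextFilter( "security" ).setParameter( "login", "andre" ); // named filter with parameter injection
    q.setFilter( someLuceneFilter );                                       // plain Lucene filter, combined with the named ones
    q.disableFullTextFilter( "security" );                                 // turn a named filter off again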
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Factory.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Factory.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Factory.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,24 @@
+//$Id$
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Marks a method as a factory method for a given type.
+ * A factory method is called whenever a new instance of a given
+ * type is requested.
+ * The factory method is used with a higher priority than a plain no-arg constructor when present
+ *
+ * @Factory currently works for @FullTextFilterDef.impl classes
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.METHOD )
+@Documented
+public @interface Factory {
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDef.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDef.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDef.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,35 @@
+//$Id$
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Defines a FullTextFilter that can be optionally applied to
+ * every FullText Queries
+ * While not related to a specific indexed entity, the annotation has to be set on one of them
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE } )
+@Documented
+public @interface FullTextFilterDef {
+ /**
+ * Filter name. Must be unique accross all mappings for a given persistence unit
+ */
+ String name();
+
+ /**
+ * Either implements org.apache.lucene.search.Filter
+ * or contains a @Factory method returning one.
+ * The Filter generated must be thread-safe
+ *
+ * If the filter accept parameters, an @Key method must be present as well
+ *
+ */
+ Class impl();
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDefs.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDefs.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/FullTextFilterDefs.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,20 @@
+//$Id$
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * A list of FullTextFilterDef
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE } )
+@Documented
+public @interface FullTextFilterDefs {
+ FullTextFilterDef[] value();
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Key.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Key.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/annotations/Key.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,25 @@
+//$Id$
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Marks a method as a key constructor for a given type.
+ * A key is an object that uniquely identify a given object type and a given set of parameters
+ *
+ * The key object must implement equals / hashcode so that 2 keys are equals iif
+ * the given target object types are the same, the set of parameters are the same.
+ *
+ * @Factory currently works for @FullTextFilterDef.impl classes
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.METHOD )
+@Documented
+public @interface Key {
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/FilterDef.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/FilterDef.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/FilterDef.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,63 @@
+//$Id$
+package org.hibernate.search.engine;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.search.SearchException;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//TODO serialization
+public class FilterDef {
+ private Class impl;
+ private Method factoryMethod;
+ private Method keyMethod;
+ private Map<String, Method> setters = new HashMap<String, Method>();
+
+ public Class getImpl() {
+ return impl;
+ }
+
+ public void setImpl(Class impl) {
+ this.impl = impl;
+ }
+
+ public Method getFactoryMethod() {
+ return factoryMethod;
+ }
+
+ public void setFactoryMethod(Method factoryMethod) {
+ this.factoryMethod = factoryMethod;
+ }
+
+ public Method getKeyMethod() {
+ return keyMethod;
+ }
+
+ public void setKeyMethod(Method keyMethod) {
+ this.keyMethod = keyMethod;
+ }
+
+ public void addSetter(String name, Method method) {
+ if ( method.isAccessible() ) method.setAccessible( true );
+ setters.put( name, method );
+ }
+
+ public void invoke(String parameterName, Object filter, Object parameterValue) {
+ Method method = setters.get( parameterName );
+ if ( method == null ) throw new SearchException( "No setter " + parameterName + " found in " + this.impl );
+ try {
+ method.invoke( filter, parameterValue );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to set Filter parameter: " + parameterName + " on filter class: " + this.impl, e );
+ }
+ catch (InvocationTargetException e) {
+ throw new SearchException( "Unable to set Filter parameter: " + parameterName + " on filter class: " + this.impl, e );
+ }
+ }
+}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2007-07-24 20:35:47 UTC (rev 12812)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -5,6 +5,7 @@
import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.search.SearchFactory;
+import org.hibernate.search.filter.FilterCachingStrategy;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.optimization.OptimizerStrategy;
import org.hibernate.search.backend.BackendQueueProcessorFactory;
@@ -30,7 +31,11 @@
void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy);
- public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider);
+ OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider);
+
+ FilterCachingStrategy getFilterCachingStrategy();
+
+ FilterDef getFilterDefinition(String name);
public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider );
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/filter/ChainedFilter.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/filter/ChainedFilter.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/filter/ChainedFilter.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,43 @@
+//$Id$
+package org.hibernate.search.filter;
+
+import java.util.BitSet;
+import java.util.List;
+import java.util.ArrayList;
+import java.io.IOException;
+
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ChainedFilter extends Filter {
+ private List<Filter> chainedFilters = new ArrayList<Filter>();
+
+
+ public void addFilter(Filter filter) {
+ this.chainedFilters.add( filter );
+ }
+
+ public BitSet bits(IndexReader reader) throws IOException {
+ if (chainedFilters.size() == 0) throw new AssertionFailure("Chainedfilter has no filters to chain for");
+ //we need to copy the first BitSet because BitSet is modified by .logicalOp
+ Filter filter = chainedFilters.get( 0 );
+ BitSet result = (BitSet) filter.bits( reader ).clone();
+ for (int index = 1 ; index < chainedFilters.size() ; index++) {
+ result.and( chainedFilters.get( index ).bits( reader ) );
+ }
+ return result;
+ }
+
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ChainedFilter [");
+ for (Filter filter : chainedFilters) {
+ sb.append( "\n ").append( filter.toString() );
+ }
+ return sb.append("\n]" ).toString();
+ }
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterCachingStrategy.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterCachingStrategy.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterCachingStrategy.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,28 @@
+//$Id$
+package org.hibernate.search.filter;
+
+import java.util.Properties;
+
+import org.apache.lucene.search.Filter;
+
+/**
+ * Defines the caching filter strategy
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FilterCachingStrategy {
+ /**
+ * initialize the strategy from the properties
+ * The Properties must not be changed
+ */
+ void initialize(Properties properties);
+ /**
+ * Retrieve the cached filter for a given key or null if not cached
+ */
+ Filter getCachedFilter(FilterKey key);
+
+ /**
+ * Propose a candidate filter for caching
+ */
+ void addCachedFilter(FilterKey key, Filter filter);
+}
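For comparison with the MRU strategy below, a minimal alternative implementation sketch (hypothetical class name, not part of the commit): an unbounded map-backed cache that only illustrates the FilterCachingStrategy contract.

    package org.hibernate.search.filter;

    import java.util.Map;
    import java.util.Properties;
    import java.util.concurrent.ConcurrentHashMap;

    import org.apache.lucene.search.Filter;

    public class MapFilterCachingStrategy implements FilterCachingStrategy {
        private final Map<FilterKey, Filter> cache = new ConcurrentHashMap<FilterKey, Filter>();

        public void initialize(Properties properties) {
            // nothing to configure in this sketch; the properties are read-only per the contract
        }

        public Filter getCachedFilter(FilterKey key) {
            return cache.get( key );
        }

        public void addCachedFilter(FilterKey key, Filter filter) {
            cache.put( key, filter );
        }
    }

Such a class could be plugged in through the hibernate.search.filter.cache_strategy.impl property introduced in this revision.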
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterKey.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterKey.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/filter/FilterKey.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,31 @@
+//$Id$
+package org.hibernate.search.filter;
+
+/**
+ * The key object must implement equals / hashcode so that 2 keys are equals if and only if
+ * the given Filter types are the same and the set of parameters are the same.
+ *
+ * The FilterKey creator (ie the @Key method) does not have to inject <code>impl</code>
+ * It will be done by Hibernate Search
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class FilterKey {
+
+ private Class impl;
+
+ /**
+ * Represent the @FullTextFilterDef.impl class
+ */
+ public Class getImpl() {
+ return impl;
+ }
+
+ public void setImpl(Class impl) {
+ this.impl = impl;
+ }
+
+ public abstract int hashCode();
+
+ public abstract boolean equals(Object obj);
+}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,46 @@
+//$Id$
+package org.hibernate.search.filter;
+
+import java.util.Properties;
+
+import org.apache.lucene.search.Filter;
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.util.SoftLimitMRUCache;
+
+/**
+ * Keep the most recently used Filters in the cache
+ * The cache is at least as big as <code>hibernate.search.filter.cache_strategy.size</code>
+ * Above this limit, Filters are kept as soft references
+ *
+ * @author Emmanuel Bernard
+ */
+public class MRUFilterCachingStrategy implements FilterCachingStrategy {
+ private static final String DEFAULT_SIZE = "128";
+ private SoftLimitMRUCache cache;
+
+ public void initialize(Properties properties) {
+ int size;
+ try {
+ size = Integer.parseInt(
+ properties.getProperty( Environment.FILTER_CACHING_STRATEGY_PREFIX + "size", DEFAULT_SIZE )
+ );
+ }
+ catch (NumberFormatException nfe) {
+ throw new SearchException(
+ "Unable to parse " + Environment.FILTER_CACHING_STRATEGY_PREFIX + "size: "
+ + properties.getProperty( Environment.FILTER_CACHING_STRATEGY_PREFIX + "size", DEFAULT_SIZE ), nfe
+ );
+ }
+
+ cache = new SoftLimitMRUCache( size );
+ }
+
+ public Filter getCachedFilter(FilterKey key) {
+ return (Filter) cache.get( key );
+ }
+
+ public void addCachedFilter(FilterKey key, Filter filter) {
+ cache.put( key, filter );
+ }
+}
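Selecting and sizing the strategy goes through the Environment keys added in this revision; a configuration sketch (the values are illustrative, "mru" is the built-in default):

    import org.hibernate.cfg.AnnotationConfiguration;
    import org.hibernate.cfg.Configuration;

    Configuration cfg = new AnnotationConfiguration();
    cfg.setProperty( "hibernate.search.filter.cache_strategy.impl", "mru" );
    cfg.setProperty( "hibernate.search.filter.cache_strategy.size", "256" );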
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/filter/StandardFilterKey.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/filter/StandardFilterKey.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/filter/StandardFilterKey.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,59 @@
+//$Id$
+package org.hibernate.search.filter;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * Implements a filter key usign all injected parameters to compute
+ * equals and hashCode
+ * the order the parameters are added is significant
+ *
+ * @author Emmanuel Bernard
+ */
+public class StandardFilterKey extends FilterKey {
+ private List parameters = new ArrayList();
+ private boolean implSet;
+
+
+ public void setImpl(Class impl) {
+ super.setImpl( impl );
+ //add impl once and only once
+ if (implSet) {
+ parameters.set( 0, impl );
+ }
+ else {
+ implSet = true;
+ parameters.add( 0, impl );
+ }
+ }
+
+ public void addParameter(Object value) {
+ parameters.add( value );
+ }
+ public int hashCode() {
+ int hash = 23;
+ for (Object param : parameters) {
+ hash = 31*hash + (param != null ? param.hashCode() : 0);
+ }
+ return hash;
+ }
+
+ public boolean equals(Object obj) {
+ if ( ! ( obj instanceof StandardFilterKey ) ) return false;
+ StandardFilterKey that = (StandardFilterKey) obj;
+ int size = parameters.size();
+ if ( size != that.parameters.size() ) return false;
+ for (int index = 0 ; index < size; index++) {
+ Object paramThis = parameters.get( index );
+ Object paramThat = that.parameters.get( index );
+ if (paramThis == null && paramThat != null) return false;
+ if (paramThis != null && ! paramThis.equals( paramThat ) ) return false;
+ }
+ return true;
+ }
+}
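A usage sketch tying StandardFilterKey to the @Factory and @Key annotations above (hypothetical class, not part of the commit; the field and index names are made up):

    package org.hibernate.search.filter;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.TermQuery;
    import org.hibernate.search.annotations.Factory;
    import org.hibernate.search.annotations.Key;

    public class StatusFilterFactory {
        private String status;                 // injected through setStatus() by FilterDef.invoke()

        public void setStatus(String status) {
            this.status = status;
        }

        @Key
        public FilterKey getKey() {
            StandardFilterKey key = new StandardFilterKey();
            key.addParameter( status );        // impl is injected later by Hibernate Search
            return key;
        }

        @Factory
        public Filter getFilter() {
            return new QueryWrapperFilter( new TermQuery( new Term( "status", status ) ) );
        }
    }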
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2007-07-24 20:35:47 UTC (rev 12812)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -1,26 +1,36 @@
//$Id$
package org.hibernate.search.impl;
-import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
-import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Properties;
import java.util.concurrent.locks.ReentrantLock;
+import java.lang.reflect.Method;
+import java.beans.Introspector;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.annotations.common.util.StringHelper;
import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.search.Environment;
import org.hibernate.search.SearchException;
import org.hibernate.search.Version;
+import org.hibernate.search.filter.FilterCachingStrategy;
+import org.hibernate.search.filter.MRUFilterCachingStrategy;
import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.annotations.Key;
+import org.hibernate.search.annotations.FullTextFilterDefs;
import org.hibernate.search.backend.BackendQueueProcessorFactory;
import org.hibernate.search.backend.LuceneIndexingParameters;
import org.hibernate.search.backend.LuceneWork;
@@ -29,6 +39,7 @@
import org.hibernate.search.backend.WorkerFactory;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.engine.FilterDef;
import org.hibernate.search.reader.ReaderProvider;
import org.hibernate.search.reader.ReaderProviderFactory;
import org.hibernate.search.store.DirectoryProvider;
@@ -37,7 +48,6 @@
import org.hibernate.util.ReflectHelper;
/**
- *
* @author Emmanuel Bernard
*/
public class SearchFactoryImpl implements SearchFactoryImplementor {
@@ -47,7 +57,7 @@
static {
Version.touch();
}
-
+
private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
//keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
@@ -57,8 +67,9 @@
private Worker worker;
private ReaderProvider readerProvider;
private BackendQueueProcessorFactory backendQueueProcessorFactory;
-
-
+ private Map<String, FilterDef> filterDefinitions;
+ private FilterCachingStrategy filterCachingStrategy;
+
/**
* Each directory provider (index) can have its own performance settings.
*/
@@ -81,15 +92,72 @@
Analyzer analyzer = initAnalyzer(cfg);
initDocumentBuilders(cfg, reflectionManager, analyzer);
-
+
Set<Class> indexedClasses = documentBuilders.keySet();
- for (DocumentBuilder builder : documentBuilders.values()) {
+ for (DocumentBuilder builder : documentBuilders.values()) {
builder.postInitialize( indexedClasses );
}
worker = WorkerFactory.createWorker( cfg, this );
readerProvider = ReaderProviderFactory.createReaderProvider( cfg, this );
+ buildFilterCachingStrategy( cfg.getProperties() );
}
+ private void bindFilterDefs(XClass mappedXClass) {
+ filterDefinitions = new HashMap<String, FilterDef>();
+ FullTextFilterDef defAnn = mappedXClass.getAnnotation( FullTextFilterDef.class );
+ if ( defAnn != null ) {
+ bindFilterDef( defAnn, mappedXClass );
+ }
+ FullTextFilterDefs defsAnn = mappedXClass.getAnnotation( FullTextFilterDefs.class );
+ if (defsAnn != null) {
+ for ( FullTextFilterDef def : defsAnn.value() ) {
+ bindFilterDef( def, mappedXClass );
+ }
+ }
+ }
+
+ private void bindFilterDef(FullTextFilterDef defAnn, XClass mappedXClass) {
+ if ( filterDefinitions.containsKey( defAnn.name() ) ) {
+ throw new SearchException("Multiple definition of @FullTextFilterDef.name=" + defAnn.name() + ": "
+ + mappedXClass.getName() );
+ }
+ FilterDef filterDef = new FilterDef();
+ filterDef.setImpl( defAnn.impl() );
+ try {
+ filterDef.getImpl().newInstance();
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
+ }
+ catch (InstantiationException e) {
+ throw new SearchException("Unable to create Filter class: " + filterDef.getImpl().getName(), e);
+ }
+ for ( Method method : filterDef.getImpl().getMethods() ) {
+ if ( method.isAnnotationPresent( Factory.class ) ) {
+ if ( filterDef.getFactoryMethod() != null ) {
+ throw new SearchException("Multiple @Factory methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName() );
+ }
+ if ( !method.isAccessible() ) method.setAccessible( true );
+ filterDef.setFactoryMethod( method );
+ }
+ if ( method.isAnnotationPresent( Key.class ) ) {
+ if ( filterDef.getKeyMethod() != null ) {
+ throw new SearchException("Multiple @Key methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName() );
+ }
+ if ( !method.isAccessible() ) method.setAccessible( true );
+ filterDef.setKeyMethod( method );
+ }
+
+ String name = method.getName();
+ if ( name.startsWith( "set" ) && method.getParameterTypes().length == 1 ) {
+ filterDef.addSetter( Introspector.decapitalize( name.substring( 3 ) ), method );
+ }
+ }
+ filterDefinitions.put( defAnn.name(), filterDef );
+ }
+
//code doesn't have to be multithreaded because SF creation is not.
//this is not a public API, should really only be used during the SessionFActory building
//FIXME this is ugly, impl.staticmethod, fix that
@@ -124,7 +192,7 @@
public void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy) {
dirProviderOptimizerStrategies.put( provider, optimizerStrategy );
}
-
+
public void addIndexingParmeters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams) {
dirProviderIndexingParams.put( provider, indexingParams );
}
@@ -132,10 +200,10 @@
public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider) {
return dirProviderOptimizerStrategies.get( provider );
}
-
+
public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider ) {
return dirProviderIndexingParams.get( provider );
- }
+ }
public ReaderProvider getReaderProvider() {
return readerProvider;
@@ -178,7 +246,7 @@
queue.add( new OptimizeLuceneWork( entityType ) );
getBackendQueueProcessorFactory().getProcessor( queue ).run();
}
-
+
private void initDocumentBuilders(Configuration cfg, ReflectionManager reflectionManager, Analyzer analyzer) {
Iterator iter = cfg.getClassMappings();
DirectoryProviderFactory factory = new DirectoryProviderFactory();
@@ -187,16 +255,20 @@
Class<?> mappedClass = clazz.getMappedClass();
if (mappedClass != null) {
XClass mappedXClass = reflectionManager.toXClass(mappedClass);
- if (mappedXClass != null && mappedXClass.isAnnotationPresent(Indexed.class)) {
- DirectoryProvider provider = factory.createDirectoryProvider(mappedXClass, cfg, this);
- // TODO move that into DirectoryProviderFactory
- if (!lockableDirectoryProviders.containsKey(provider)) {
- lockableDirectoryProviders.put(provider, new ReentrantLock());
+ if ( mappedXClass != null) {
+ if ( mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+ DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg, this );
+ //TODO move that into DirectoryProviderFactory
+ if ( !lockableDirectoryProviders.containsKey( provider ) ) {
+ lockableDirectoryProviders.put( provider, new ReentrantLock() );
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
+ mappedXClass, analyzer, provider, reflectionManager
+ );
+
+ documentBuilders.put( mappedClass, documentBuilder );
}
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(mappedXClass, analyzer,
- provider, reflectionManager);
-
- documentBuilders.put(mappedClass, documentBuilder);
+ bindFilterDefs(mappedXClass);
}
}
}
@@ -204,7 +276,7 @@
/**
* Initilises the Lucene analyzer to use by reading the analyzer class from the configuration and instantiating it.
- *
+ *
* @param cfg
* The current configuration.
* @return The Lucene analyzer to use for tokenisation.
@@ -233,5 +305,36 @@
throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClassName, e);
}
return defaultAnalyzer;
- }
+ }
+
+ private void buildFilterCachingStrategy(Properties properties) {
+ String impl = properties.getProperty( Environment.FILTER_CACHING_STRATEGY_IMPL );
+ if ( StringHelper.isEmpty( impl ) || "mru".equalsIgnoreCase( impl ) ) {
+ filterCachingStrategy = new MRUFilterCachingStrategy();
+ }
+ else {
+ try {
+ Class filterCachingStrategyClass = org.hibernate.annotations.common.util.ReflectHelper.classForName( impl, SearchFactoryImpl.class );
+ filterCachingStrategy = (FilterCachingStrategy) filterCachingStrategyClass.newInstance();
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find filterCachingStrategy class: " + impl, e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instanciate filterCachingStrategy class: " + impl, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instanciate filterCachingStrategy class: " + impl, e );
+ }
+ }
+ filterCachingStrategy.initialize( properties );
+ }
+
+ public FilterCachingStrategy getFilterCachingStrategy() {
+ return filterCachingStrategy;
+ }
+
+ public FilterDef getFilterDefinition(String name) {
+ return filterDefinitions.get( name );
+ }
}
Added: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextFilterImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextFilterImpl.java (rev 0)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextFilterImpl.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,37 @@
+//$Id$
+package org.hibernate.search.query;
+
+import java.util.Map;
+import java.util.HashMap;
+
+import org.hibernate.search.FullTextFilter;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FullTextFilterImpl implements FullTextFilter {
+ private Map<String, Object> parameters = new HashMap<String, Object>();
+ private String name;
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public FullTextFilter setParameter(String name, Object value) {
+ parameters.put( name, value );
+ return this;
+ }
+
+ public Object getParameter(String name) {
+ return parameters.get( name );
+ }
+
+
+ public Map<String, Object> getParameters() {
+ return parameters;
+ }
+}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-07-24 20:35:47 UTC (rev 12812)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -4,17 +4,20 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.lang.reflect.InvocationTargetException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searcher;
@@ -32,16 +35,20 @@
import org.hibernate.engine.query.ParameterMetadata;
import org.hibernate.impl.AbstractQueryImpl;
import org.hibernate.impl.CriteriaImpl;
+import org.hibernate.search.FullTextFilter;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.SearchException;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.DocumentExtractor;
import org.hibernate.search.engine.EntityInfo;
+import org.hibernate.search.engine.FilterDef;
import org.hibernate.search.engine.Loader;
import org.hibernate.search.engine.ObjectLoader;
import org.hibernate.search.engine.ProjectionLoader;
import org.hibernate.search.engine.QueryLoader;
import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.filter.ChainedFilter;
+import org.hibernate.search.filter.FilterKey;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.util.ContextHelper;
@@ -61,8 +68,11 @@
private Integer maxResults;
private Integer resultSize;
private Sort sort;
+ private Filter filter;
private Criteria criteria;
private String[] indexProjection;
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private Map<String, FullTextFilterImpl> filterDefinitions;
/**
* classes must be immutable
@@ -75,13 +85,23 @@
this.classes = classes;
}
-
+ /**
+ * {@inheritDoc}
+ */
public FullTextQuery setSort(Sort sort) {
this.sort = sort;
return this;
}
/**
+ * {@inheritDoc}
+ */
+ public FullTextQuery setFilter(Filter filter) {
+ this.filter = filter;
+ return this;
+ }
+
+ /**
* Return an interator on the results.
* Retrieve the object one by one (initialize it during the next() operation)
*/
@@ -242,16 +262,104 @@
private Hits getHits(Searcher searcher) throws IOException {
Hits hits;
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
- if ( sort == null ) {
- hits = searcher.search( query );
- }
- else {
- hits = searcher.search( query, sort );
- }
+ buildFilters();
+ hits = searcher.search( query, filter, sort );
setResultSize( hits );
return hits;
}
+ private void buildFilters() {
+ SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
+ if ( filterDefinitions != null && filterDefinitions.size() > 0 ) {
+ ChainedFilter chainedFilter = new ChainedFilter();
+ for ( FullTextFilterImpl filterDefinition : filterDefinitions.values() ) {
+ FilterDef def = searchFactoryImplementor.getFilterDefinition( filterDefinition.getName() );
+ Class implClass = def.getImpl();
+ Object instance;
+ try {
+ instance = implClass.newInstance();
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
+ }
+ for ( Map.Entry<String, Object> entry : filterDefinition.getParameters().entrySet() ) {
+ def.invoke( entry.getKey(), instance, entry.getValue() );
+ }
+ if ( def.getKeyMethod() == null && filterDefinition.getParameters().size() > 0 ) {
+ throw new SearchException("Filter with parameters and no @Key method: " + filterDefinition.getName() );
+ }
+ FilterKey key;
+ if ( def.getKeyMethod() == null ) {
+ key = new FilterKey( ) {
+ public int hashCode() {
+ return getImpl().hashCode();
+ }
+
+ public boolean equals(Object obj) {
+ if ( ! ( obj instanceof FilterKey ) ) return false;
+ FilterKey that = (FilterKey) obj;
+ return this.getImpl().equals( that.getImpl() );
+ }
+ };
+ }
+ else {
+ try {
+ key = (FilterKey) def.getKeyMethod().invoke( instance );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException("Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ }
+ catch (InvocationTargetException e) {
+ throw new SearchException("Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ }
+ catch (ClassCastException e) {
+ throw new SearchException("@Key method does not return FilterKey: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ }
+ }
+ key.setImpl( def.getImpl() );
+ Filter filter = searchFactoryImplementor.getFilterCachingStrategy().getCachedFilter( key );
+ if (filter == null) {
+ if ( def.getFactoryMethod() != null ) {
+ try {
+ filter = (Filter) def.getFactoryMethod().invoke( instance );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException("Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ }
+ catch (InvocationTargetException e) {
+ throw new SearchException("Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ }
+ catch (ClassCastException e) {
+ throw new SearchException("@Key method does not return a org.apache.lucene.search.Filter class: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ }
+ }
+ else {
+ try {
+ filter = (Filter) instance;
+ }
+ catch (ClassCastException e) {
+ throw new SearchException("@Key method does not return a org.apache.lucene.search.Filter class: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ }
+ }
+ searchFactoryImplementor.getFilterCachingStrategy().addCachedFilter( key, filter );
+ }
+ chainedFilter.addFilter( filter );
+ }
+ if ( filter != null ) chainedFilter.addFilter( filter );
+ filter = chainedFilter;
+ }
+ }
+
private org.apache.lucene.search.Query filterQueryByClasses(org.apache.lucene.search.Query luceneQuery) {
//A query filter is more practical than a manual class filtering post query (esp on scrollable resultsets)
//it also probably minimise the memory footprint
@@ -404,6 +512,34 @@
return null;
}
+ public FullTextFilter enableFullTextFilter(String name) {
+ if ( filterDefinitions == null ) {
+ filterDefinitions = new HashMap<String, FullTextFilterImpl>();
+ }
+ FullTextFilterImpl filterDefinition = filterDefinitions.get( name );
+ if ( filterDefinition != null ) return filterDefinition;
+
+ filterDefinition = new FullTextFilterImpl();
+ filterDefinition.setName( name );
+ FilterDef filterDef = getSearchFactoryImplementor().getFilterDefinition( name );
+ if (filterDef == null) {
+ throw new SearchException("Unkown @FullTextFilter: " + name);
+ }
+ filterDefinitions.put(name, filterDefinition);
+ return filterDefinition;
+ }
+
+ public void disableFullTextFilter(String name) {
+ filterDefinitions.remove( name );
+ }
+
+ private SearchFactoryImplementor getSearchFactoryImplementor() {
+ if ( searchFactoryImplementor == null ) {
+ searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
+ }
+ return searchFactoryImplementor;
+ }
+
private static Loader noLoader = new Loader() {
public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/BestDriversFilter.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/BestDriversFilter.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/BestDriversFilter.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,25 @@
+//$Id$
+package org.hibernate.search.test.filter;
+
+import java.util.BitSet;
+import java.io.IOException;
+
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BestDriversFilter extends Filter {
+
+ public BitSet bits(IndexReader reader) throws IOException {
+ BitSet bitSet = new BitSet( reader.maxDoc() );
+ TermDocs termDocs = reader.termDocs( new Term("score", "5") );
+ while ( termDocs.next() ) {
+ bitSet.set( termDocs.doc() );
+ }
+ return bitSet;
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/Driver.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/Driver.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/Driver.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,104 @@
+//$Id$
+package org.hibernate.search.test.filter;
+
+import java.util.Date;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.FullTextFilterDefs;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@FullTextFilterDefs( {
+ @FullTextFilterDef(name = "bestDriver", impl = BestDriversFilter.class), //actual Filter implementation
+ @FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class) //Filter factory with parameters
+})
+public class Driver {
+ @Id
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String name;
+ @Field(index= Index.UN_TOKENIZED)
+ private String teacher;
+ @Field(index= Index.UN_TOKENIZED)
+ private int score;
+ @Field(index= Index.UN_TOKENIZED)
+ @DateBridge( resolution = Resolution.YEAR)
+ private Date delivery;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getTeacher() {
+ return teacher;
+ }
+
+ public void setTeacher(String teacher) {
+ this.teacher = teacher;
+ }
+
+ public int getScore() {
+ return score;
+ }
+
+ public void setScore(int score) {
+ this.score = score;
+ }
+
+ public Date getDelivery() {
+ return delivery;
+ }
+
+ public void setDelivery(Date delivery) {
+ this.delivery = delivery;
+ }
+
+ public boolean equals(Object o) {
+ if ( this == o ) return true;
+ if ( o == null || getClass() != o.getClass() ) return false;
+
+ Driver driver = (Driver) o;
+
+ if ( id != driver.id ) return false;
+ if ( score != driver.score ) return false;
+ if ( delivery != null ? !delivery.equals( driver.delivery ) : driver.delivery != null ) return false;
+ if ( name != null ? !name.equals( driver.name ) : driver.name != null ) return false;
+ if ( teacher != null ? !teacher.equals( driver.teacher ) : driver.teacher != null ) return false;
+
+ return true;
+ }
+
+ public int hashCode() {
+ int result;
+ result = id;
+ result = 31 * result + ( name != null ? name.hashCode() : 0 );
+ result = 31 * result + ( teacher != null ? teacher.hashCode() : 0 );
+ result = 31 * result + score;
+ result = 31 * result + ( delivery != null ? delivery.hashCode() : 0 );
+ return result;
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/FilterTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/FilterTest.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/FilterTest.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,137 @@
+//$Id$
+package org.hibernate.search.test.filter;
+
+import java.util.Date;
+import java.util.Calendar;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.Session;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.RangeFilter;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class FilterTest extends SearchTestCase {
+
+ public void testNamedFilters() {
+ createData();
+ FullTextSession s = Search.createFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term("teacher", "andre") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "max") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "aaron") ), BooleanClause.Occur.SHOULD );
+ FullTextQuery ftQuery = s.createFullTextQuery( query, Driver.class );
+ assertEquals("No filter should happen", 3, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ assertEquals("Should filter out Gavin", 2, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ assertEquals("Should filter to limit to Emmanuel", 1, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ ftQuery.disableFullTextFilter( "security");
+ ftQuery.disableFullTextFilter( "bestDriver");
+ assertEquals("Should not filter anymore", 3, ftQuery.getResultSize() );
+
+ s.getTransaction().commit();
+ s.close();
+ deleteData();
+ }
+
+ public void testStraightFilters() {
+ createData();
+ FullTextSession s = Search.createFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term("teacher", "andre") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "max") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "aaron") ), BooleanClause.Occur.SHOULD );
+ FullTextQuery ftQuery;
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ Filter dateFilter = new RangeFilter("delivery", "2001", "2005",
+ true, true);
+ ftQuery.setFilter( dateFilter );
+ assertEquals("Should select only liz", 1, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.setFilter( dateFilter );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ ftQuery.disableFullTextFilter( "security");
+ ftQuery.disableFullTextFilter( "bestDriver");
+ ftQuery.setFilter( null );
+ assertEquals("Should not filter anymore", 3, ftQuery.getResultSize() );
+
+ s.getTransaction().commit();
+ s.close();
+ deleteData();
+ }
+
+
+ private void deleteData() {
+ Session s = openSession( );
+ s.getTransaction().begin();
+ s.createQuery( "delete " + Driver.class.getName() + " t").executeUpdate();
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ private void createData() {
+ Session s = openSession( );
+ s.getTransaction().begin();
+ Calendar cal = Calendar.getInstance();
+ cal.set( 2006, 10, 11);
+ Driver driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 1 );
+ driver.setName( "Emmanuel" );
+ driver.setScore( 5 );
+ driver.setTeacher( "andre" );
+ s.persist( driver );
+
+ cal.set( 2007, 10, 11);
+ driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 2 );
+ driver.setName( "Gavin" );
+ driver.setScore( 3 );
+ driver.setTeacher( "aaron" );
+ s.persist( driver );
+
+ cal.set( 2004, 10, 11);
+ driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 3 );
+ driver.setName( "Liz" );
+ driver.setScore( 5 );
+ driver.setTeacher( "max" );
+ s.persist( driver );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Driver.class
+ };
+ }
+}
Added: trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/SecurityFilterFactory.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/SecurityFilterFactory.java (rev 0)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/filter/SecurityFilterFactory.java 2007-07-25 02:58:26 UTC (rev 12813)
@@ -0,0 +1,43 @@
+//$Id$
+package org.hibernate.search.test.filter;
+
+import org.hibernate.search.filter.FilterKey;
+import org.hibernate.search.filter.StandardFilterKey;
+import org.hibernate.search.annotations.Key;
+import org.hibernate.search.annotations.Factory;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.QueryFilter;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.index.Term;
+
+/**
+ * Apply a security filter to the results
+ *
+ * @author Emmanuel Bernard
+ */
+public class SecurityFilterFactory {
+ private String login;
+
+ /**
+ * injected parameter
+ */
+ public void setLogin(String login) {
+ this.login = login;
+ }
+
+ @Key
+ public FilterKey getKey() {
+ StandardFilterKey key = new StandardFilterKey();
+ key.addParameter( login );
+ return key;
+ }
+
+ @Factory
+ public Filter getFilter() {
+ Query query = new TermQuery( new Term("teacher", login) );
+ return new CachingWrapperFilter( new QueryWrapperFilter(query) );
+ }
+}
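
For readers following FilterTest above: the named filters "bestDriver" and "security" that the test enables must be declared on the indexed entity through Hibernate Search's filter definition annotations. Below is a hedged sketch of what that declaration could look like on the Driver test entity; neither the Driver class nor a BestDriversFilter is part of this diff, so both are illustrative assumptions, while the "security" definition reuses the SecurityFilterFactory shown above.

    //$Id$
    package org.hibernate.search.test.filter;

    import java.io.IOException;
    import java.util.BitSet;

    import javax.persistence.Entity;
    import javax.persistence.Id;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.TermDocs;
    import org.apache.lucene.search.Filter;

    import org.hibernate.search.annotations.DocumentId;
    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.FullTextFilterDef;
    import org.hibernate.search.annotations.FullTextFilterDefs;
    import org.hibernate.search.annotations.Indexed;

    /** Assumed plain-Filter counterpart to the factory above: keeps only drivers with score 5. */
    class BestDriversFilter extends Filter {
        public BitSet bits(IndexReader reader) throws IOException {
            BitSet bits = new BitSet( reader.maxDoc() );
            TermDocs termDocs = reader.termDocs( new Term( "score", "5" ) );
            while ( termDocs.next() ) {
                bits.set( termDocs.doc() );
            }
            return bits;
        }
    }

    @Entity
    @Indexed
    @FullTextFilterDefs( {
        @FullTextFilterDef(name = "bestDriver", impl = BestDriversFilter.class),
        @FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class)
    } )
    public class Driver {
        @Id @DocumentId
        private int id;

        @Field private String name;
        @Field private String teacher;
        @Field private int score;

        // delivery field, getters/setters and equals()/hashCode() omitted in this sketch
    }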
Hibernate SVN: r12811 - in core/trunk/cache-jbosscache2/src/main/java: org and 5 other directories.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2007-07-24 16:34:07 -0400 (Tue, 24 Jul 2007)
New Revision: 12811
Added:
core/trunk/cache-jbosscache2/src/main/java/org/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/BasicRegionAdapter.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/CacheInstanceManager.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/JBossCacheRegionFactory.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/InvalidationCacheInstanceManager.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/MultiplexingCacheInstanceManager.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/EntityRegionImpl.java
core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/ReadOnlyAccess.java
Log:
partial jbosscache2.x support
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/BasicRegionAdapter.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/BasicRegionAdapter.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/BasicRegionAdapter.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.jboss.cache.Cache;
+import org.jboss.cache.Fqn;
+import org.jboss.cache.config.Option;
+
+import org.hibernate.cache.CacheException;
+import org.hibernate.cache.Region;
+
+/**
+ * General support for writing {@link Region} implementations on top of a
+ * JBossCache {@link Cache} instance.
+ *
+ * @author Steve Ebersole
+ */
+public abstract class BasicRegionAdapter implements Region {
+ public static final String ITEM = "item";
+
+ protected final Cache jbcCache;
+ protected final String regionName;
+ protected final Fqn regionFqn;
+
+ public BasicRegionAdapter(Cache jbcCache, String regionName) {
+ this.jbcCache = jbcCache;
+ this.regionName = regionName;
+ this.regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) );
+ activateLocalClusterNode();
+ }
+
+ private void activateLocalClusterNode() {
+ org.jboss.cache.Region jbcRegion = jbcCache.getRegion( regionFqn, true );
+ if ( jbcRegion.isActive() ) {
+ return;
+ }
+ ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+ if ( classLoader == null ) {
+ classLoader = getClass().getClassLoader();
+ }
+ jbcRegion.registerContextClassLoader( classLoader );
+ jbcRegion.activate();
+ }
+
+ public String getName() {
+ return regionName;
+ }
+
+ public void destroy() throws CacheException {
+ try {
+ // NOTE : this is being used from the process of shutting down a
+ // SessionFactory. Specific things to consider:
+ // (1) this clearing of the region should not propagate to
+ // other nodes on the cluster (if any); this is the
+ // cache-mode-local option bit...
+ // (2) really just trying a best effort to cleanup after
+ // ourselves; lock failures, etc are not critical here;
+ // this is the fail-silently option bit...
+ Option option = new Option();
+ option.setCacheModeLocal( true );
+ option.setFailSilently( true );
+ jbcCache.getInvocationContext().setOptionOverrides( option );
+ jbcCache.removeNode( regionFqn );
+ deactivateLocalNode();
+ }
+ catch( Exception e ) {
+ throw new CacheException( e );
+ }
+ }
+
+ private void deactivateLocalNode() {
+ org.jboss.cache.Region jbcRegion = jbcCache.getRegion( regionFqn, false );
+ if ( jbcRegion != null && jbcRegion.isActive() ) {
+ jbcRegion.deactivate();
+ jbcRegion.unregisterContextClassLoader();
+ }
+ }
+
+ public long getSizeInMemory() {
+ // not supported
+ return -1;
+ }
+
+ public long getElementCountInMemory() {
+ try {
+ Set children = jbcCache.getRoot().getChild( regionFqn ).getChildrenNames();
+ return children == null ? 0 : children.size();
+ }
+ catch ( Exception e ) {
+ throw new CacheException( e );
+ }
+ }
+
+ public long getElementCountOnDisk() {
+ return -1;
+ }
+
+ public Map toMap() {
+ try {
+ Map result = new HashMap();
+ Set childrenNames = jbcCache.getRoot().getChild( regionFqn ).getChildrenNames();
+ if (childrenNames != null) {
+ for ( Object childName : childrenNames ) {
+ result.put( childName, jbcCache.get( new Fqn( regionFqn, childName ), ITEM ) );
+ }
+ }
+ return result;
+ }
+ catch (Exception e) {
+ throw new CacheException(e);
+ }
+ }
+
+ public long nextTimestamp() {
+ return System.currentTimeMillis() / 100;
+ }
+
+ public int getTimeout() {
+ return 600; //60 seconds
+ }
+}
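
Two details of BasicRegionAdapter above are easy to miss: the Hibernate region name is turned into a JBossCache Fqn simply by replacing dots with slashes, and nextTimestamp()/getTimeout() work in 100-millisecond units, so the timeout of 600 is the 60 seconds the comment mentions. A minimal sketch of the Fqn mapping (the region name is an assumption):

    import org.jboss.cache.Fqn;

    public class RegionFqnSketch {
        public static void main(String[] args) {
            // An assumed entity region name, as Hibernate would typically derive it from a class name.
            String regionName = "com.acme.model.Customer";
            // The same translation BasicRegionAdapter performs in its constructor.
            Fqn regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) );
            System.out.println( regionFqn ); // prints /com/acme/model/Customer
        }
    }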
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/CacheInstanceManager.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/CacheInstanceManager.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/CacheInstanceManager.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2;
+
+import org.jboss.cache.Cache;
+
+/**
+ * Acts as an abstraction over how instances of {@link Cache} are built/obtained.
+ *
+ * @author Steve Ebersole
+ */
+public interface CacheInstanceManager {
+ /**
+ * Retrieve a handle to the {@link Cache} instance to be used for storing
+ * entity data.
+ *
+ * @return The entity data cache instance.
+ */
+ public Cache getEntityCacheInstance();
+
+ /**
+ * Retrieve a handle to the {@link Cache} instance to be used for storing
+ * collection data.
+ *
+ * @return The collection data cache instance.
+ */
+ public Cache getCollectionCacheInstance();
+
+ /**
+ * Retrieve a handle to the {@link Cache} instance to be used for storing
+ * query results.
+ *
+ * @return The query result cache instance.
+ */
+ public Cache getQueryCacheInstance();
+
+ /**
+ * Retrieve a handle to the {@link Cache} instance to be used for storing
+ * timestamps.
+ *
+ * @return The timestamps cache instance.
+ */
+ public Cache getTimestampsCacheInstance();
+
+ /**
+ * Release any held resources.
+ */
+ public void release();
+}
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/JBossCacheRegionFactory.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/JBossCacheRegionFactory.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/JBossCacheRegionFactory.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2;
+
+import java.util.Properties;
+
+import org.hibernate.cache.CacheDataDescription;
+import org.hibernate.cache.CacheException;
+import org.hibernate.cache.CollectionRegion;
+import org.hibernate.cache.EntityRegion;
+import org.hibernate.cache.QueryResultsRegion;
+import org.hibernate.cache.RegionFactory;
+import org.hibernate.cache.TimestampsRegion;
+import org.hibernate.cache.jbc2.builder.InvalidationCacheInstanceManager;
+import org.hibernate.cache.jbc2.entity.EntityRegionImpl;
+import org.hibernate.cfg.Settings;
+
+/**
+ * {@inheritDoc}
+ *
+ * @author Steve Ebersole
+ */
+public class JBossCacheRegionFactory implements RegionFactory {
+ private CacheInstanceManager cacheInstanceManager;
+
+ public JBossCacheRegionFactory() {
+ }
+
+ public JBossCacheRegionFactory(CacheInstanceManager cacheInstanceManager) {
+ this.cacheInstanceManager = cacheInstanceManager;
+ }
+
+ public void start(Settings settings, Properties properties) throws CacheException {
+ if ( cacheInstanceManager == null ) {
+ cacheInstanceManager = new InvalidationCacheInstanceManager( settings, properties );
+ }
+ }
+
+ public void stop() {
+ if ( cacheInstanceManager != null ) {
+ cacheInstanceManager.release();
+ }
+ }
+
+ public boolean isMinimalPutsEnabledByDefault() {
+ return true;
+ }
+
+ public long nextTimestamp() {
+ return System.currentTimeMillis() / 100;
+ }
+
+ public EntityRegion buildEntityRegion(
+ String regionName,
+ Properties properties,
+ CacheDataDescription metadata) throws CacheException {
+ return new EntityRegionImpl( cacheInstanceManager.getEntityCacheInstance(), regionName, metadata );
+ }
+
+ public CollectionRegion buildCollectionRegion(
+ String regionName,
+ Properties properties,
+ CacheDataDescription metadata) throws CacheException {
+ return null;
+ }
+
+ public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
+ return null;
+ }
+
+ public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
+ return null;
+ }
+
+}
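
Note that buildCollectionRegion(), buildQueryResultsRegion() and buildTimestampsRegion() still return null, so only entity regions are usable at this point, which matches the "partial jbosscache2.x support" note in the log. The constructors do allow the factory to be assembled programmatically; a hedged sketch under the assumption of an invalidation-style configuration resource (how the factory is then registered with the SessionFactory is outside this sketch):

    import org.jboss.cache.Cache;
    import org.jboss.cache.DefaultCacheFactory;

    import org.hibernate.cache.jbc2.JBossCacheRegionFactory;
    import org.hibernate.cache.jbc2.builder.InvalidationCacheInstanceManager;

    public class RegionFactoryBootstrapSketch {
        public static void main(String[] args) {
            // Build a single JBossCache instance from an assumed invalidation-mode configuration resource.
            Cache cache = DefaultCacheFactory.getInstance().createCache( "treecache.xml" );
            // Reuse that one instance for every region type via the invalidation-based manager.
            JBossCacheRegionFactory regionFactory =
                    new JBossCacheRegionFactory( new InvalidationCacheInstanceManager( cache ) );
        }
    }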
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/InvalidationCacheInstanceManager.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/InvalidationCacheInstanceManager.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/InvalidationCacheInstanceManager.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2.builder;
+
+import java.util.Properties;
+
+import javax.transaction.TransactionManager;
+
+import org.jboss.cache.Cache;
+import org.jboss.cache.DefaultCacheFactory;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.util.PropertiesHelper;
+import org.hibernate.cache.jbc2.CacheInstanceManager;
+import org.hibernate.cfg.Settings;
+
+/**
+ * A {@link org.hibernate.cache.jbc2.CacheInstanceManager} implementation where we use a single cache instance,
+ * which we assume to be configured for invalidation if operating on a cluster. Under that
+ * assumption, we can store all data into the same {@link Cache} instance.
+ *
+ * @author Steve Ebersole
+ */
+public class InvalidationCacheInstanceManager implements CacheInstanceManager {
+ public static final String CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc2.cfg.invalidation";
+ public static final String DEFAULT_CACHE_RESOURCE = "treecache.xml";
+
+ private static final Log log = LogFactory.getLog( InvalidationCacheInstanceManager.class );
+
+ private final Cache cache;
+
+ public InvalidationCacheInstanceManager(Settings settings, Properties properties) {
+ String configResource = PropertiesHelper.getString( CACHE_RESOURCE_PROP, properties, DEFAULT_CACHE_RESOURCE );
+ cache = DefaultCacheFactory.getInstance().createCache( configResource, false );
+ if ( settings.getTransactionManagerLookup() != null ) {
+ TransactionManager tm = settings.getTransactionManagerLookup().getTransactionManager( properties );
+ if ( tm != null ) {
+ cache.getConfiguration().getRuntimeConfig().setTransactionManager( tm );
+ }
+ }
+ cache.start();
+ }
+
+ public InvalidationCacheInstanceManager(Cache cache) {
+ this.cache = cache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getEntityCacheInstance() {
+ return cache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getCollectionCacheInstance() {
+ return cache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getQueryCacheInstance() {
+ return cache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getTimestampsCacheInstance() {
+ return cache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void release() {
+ if ( cache != null ) {
+ try {
+ cache.stop();
+ }
+ catch( Throwable t ) {
+ log.warn( "Unable to stop cache instance", t );
+ }
+ }
+ }
+}
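
By default the manager above loads treecache.xml from the classpath; the CACHE_RESOURCE_PROP key it defines lets that be overridden through the properties Hibernate hands to the region factory. A small sketch (the resource name is an assumption):

    import java.util.Properties;

    import org.hibernate.cache.jbc2.builder.InvalidationCacheInstanceManager;

    public class CacheResourceOverrideSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Point the invalidation-based manager at a custom JBossCache configuration resource.
            props.setProperty( InvalidationCacheInstanceManager.CACHE_RESOURCE_PROP,
                    "my-invalidation-cache.xml" );
            // These are the properties the Settings-based constructor reads via PropertiesHelper.
        }
    }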
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/MultiplexingCacheInstanceManager.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/MultiplexingCacheInstanceManager.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/builder/MultiplexingCacheInstanceManager.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,204 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2.builder;
+
+import java.util.Properties;
+import javax.transaction.TransactionManager;
+
+import org.jboss.cache.Cache;
+import org.jboss.cache.DefaultCacheFactory;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.cache.CacheException;
+import org.hibernate.cache.jbc2.CacheInstanceManager;
+import org.hibernate.cfg.Settings;
+import org.hibernate.util.PropertiesHelper;
+
+/**
+ * Here we build separate {@link Cache} instances for each type of region, but
+ * use the JGroups multiplexer under the covers to re-use the same group
+ * communication stack.
+ *
+ * @author Steve Ebersole
+ */
+public class MultiplexingCacheInstanceManager implements CacheInstanceManager {
+ public static final String ENTITY_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc2.cfg.entity";
+ public static final String COLL_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc2.cfg.collection";
+ public static final String TS_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc2.cfg.ts";
+ public static final String QUERY_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc2.cfg.query";
+
+ public static final String DEF_ENTITY_RESOURCE = "entity-cache.xml";
+ public static final String DEF_COLL_RESOURCE = "collection-cache.xml";
+ public static final String DEF_TS_RESOURCE = "ts-cache.xml";
+ public static final String DEF_QUERY_RESOURCE = "query-cache.xml";
+
+ public static final String OPTIMISTIC_LOCKING_SCHEME = "OPTIMISTIC";
+
+ private static final Log log = LogFactory.getLog( MultiplexingCacheInstanceManager.class );
+
+ private final Cache jbcEntityCache;
+ private final Cache jbcCollectionCache;
+ private final Cache jbcTsCache;
+ private final Cache jbcQueryCache;
+
+ public MultiplexingCacheInstanceManager(Settings settings, Properties properties) {
+ try {
+ TransactionManager tm = settings.getTransactionManagerLookup() == null
+ ? null
+ : settings.getTransactionManagerLookup().getTransactionManager( properties );
+ if ( settings.isSecondLevelCacheEnabled() ) {
+ jbcEntityCache = buildEntityRegionCacheInstance( properties );
+ jbcCollectionCache = buildCollectionRegionCacheInstance( properties );
+ if ( tm != null ) {
+ jbcEntityCache.getConfiguration().getRuntimeConfig().setTransactionManager( tm );
+ jbcCollectionCache.getConfiguration().getRuntimeConfig().setTransactionManager( tm );
+ }
+ }
+ else {
+ jbcEntityCache = null;
+ jbcCollectionCache = null;
+ }
+ if ( settings.isQueryCacheEnabled() ) {
+ jbcTsCache = buildTsRegionCacheInstance( properties );
+ jbcQueryCache = buildQueryRegionCacheInstance( properties );
+ }
+ else {
+ jbcTsCache = null;
+ jbcQueryCache = null;
+ }
+ }
+ catch( CacheException ce ) {
+ throw ce;
+ }
+ catch( Throwable t ) {
+ throw new CacheException( "Unable to start region factory", t );
+ }
+ }
+
+ public MultiplexingCacheInstanceManager(Cache jbcEntityCache, Cache jbcCollectionCache, Cache jbcTsCache, Cache jbcQueryCache) {
+ this.jbcEntityCache = jbcEntityCache;
+ this.jbcCollectionCache = jbcCollectionCache;
+ this.jbcTsCache = jbcTsCache;
+ this.jbcQueryCache = jbcQueryCache;
+ }
+
+ protected Cache buildEntityRegionCacheInstance(Properties properties) {
+ try {
+ String configResource = PropertiesHelper.getString( ENTITY_CACHE_RESOURCE_PROP, properties, DEF_ENTITY_RESOURCE );
+ return DefaultCacheFactory.getInstance().createCache( configResource );
+ }
+ catch( Throwable t ) {
+ throw new CacheException( "unable to build entity region cache instance", t );
+ }
+ }
+
+ protected Cache buildCollectionRegionCacheInstance(Properties properties) {
+ try {
+ String configResource = PropertiesHelper.getString( COLL_CACHE_RESOURCE_PROP, properties, DEF_COLL_RESOURCE );
+ return DefaultCacheFactory.getInstance().createCache( configResource );
+ }
+ catch( Throwable t ) {
+ throw new CacheException( "unable to build collection region cache instance", t );
+ }
+ }
+
+ protected Cache buildTsRegionCacheInstance(Properties properties) {
+ try {
+ String configResource = PropertiesHelper.getString( TS_CACHE_RESOURCE_PROP, properties, DEF_TS_RESOURCE );
+ return DefaultCacheFactory.getInstance().createCache( configResource );
+ }
+ catch( Throwable t ) {
+ throw new CacheException( "unable to build timestamps region cache instance", t );
+ }
+ }
+
+ protected Cache buildQueryRegionCacheInstance(Properties properties) {
+ try {
+ String configResource = PropertiesHelper.getString( QUERY_CACHE_RESOURCE_PROP, properties, DEF_QUERY_RESOURCE );
+ return DefaultCacheFactory.getInstance().createCache( configResource );
+ }
+ catch( Throwable t ) {
+ throw new CacheException( "unable to build query region cache instance", t );
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getEntityCacheInstance() {
+ return jbcEntityCache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getCollectionCacheInstance() {
+ return jbcCollectionCache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getQueryCacheInstance() {
+ return jbcQueryCache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public Cache getTimestampsCacheInstance() {
+ return jbcTsCache;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void release() {
+ if ( jbcEntityCache != null ) {
+ try {
+ jbcEntityCache.stop();
+ }
+ catch( Throwable t ) {
+ log.info( "Unable to stop entity cache instance", t );
+ }
+ }
+ if ( jbcCollectionCache != null ) {
+ try {
+ jbcCollectionCache.stop();
+ }
+ catch( Throwable t ) {
+ log.info( "Unable to stop collection cache instance", t );
+ }
+ }
+ if ( jbcTsCache != null ) {
+ try {
+ jbcTsCache.stop();
+ }
+ catch( Throwable t ) {
+ log.info( "Unable to stop timestamp cache instance", t );
+ }
+ }
+ if ( jbcQueryCache != null ) {
+ try {
+ jbcQueryCache.stop();
+ }
+ catch( Throwable t ) {
+ log.info( "Unable to stop query cache instance", t );
+ }
+ }
+ }
+}
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/EntityRegionImpl.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/EntityRegionImpl.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/EntityRegionImpl.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2.entity;
+
+import org.jboss.cache.Cache;
+import org.jboss.cache.Fqn;
+
+import org.hibernate.cache.CacheDataDescription;
+import org.hibernate.cache.CacheException;
+import org.hibernate.cache.EntityRegion;
+import org.hibernate.cache.access.AccessType;
+import org.hibernate.cache.access.EntityRegionAccessStrategy;
+import org.hibernate.cache.jbc2.BasicRegionAdapter;
+
+
+/**
+ * Defines the behavior of the entity cache regions for JBossCache.
+ *
+ * @author Steve Ebersole
+ */
+public class EntityRegionImpl extends BasicRegionAdapter implements EntityRegion {
+ private final CacheDataDescription metadata;
+
+ public EntityRegionImpl(Cache jbcCache, String regionName, CacheDataDescription metadata) {
+ super( jbcCache, regionName );
+ this.metadata = metadata;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
+ // todo : allow the other strategies, which will require a non-transactional cache instance
+ if ( ! ( AccessType.READ_ONLY.equals( accessType ) || AccessType.TRANSACTIONAL.equals( accessType ) ) ) {
+ throw new CacheException(
+ "TreeCacheRegionFactory only supports " + AccessType.READ_ONLY.getName() + " or " +
+ AccessType.TRANSACTIONAL + " access strategies [" + accessType.getName() + "]"
+ );
+ }
+ return null;
+ }
+
+ /**
+ * Here, for JBossCache, we consider the cache to be transaction aware if the underlying
+ * cache instance has a reference to the transaction manager.
+ */
+ public boolean isTransactionAware() {
+ return jbcCache.getConfiguration().getRuntimeConfig().getTransactionManager() != null;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public CacheDataDescription getCacheDataDescription() {
+ return metadata;
+ }
+
+ Cache getCacheInstance() {
+ return jbcCache;
+ }
+
+ Fqn getRegionFqn() {
+ return regionFqn;
+ }
+}
Added: core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/ReadOnlyAccess.java
===================================================================
--- core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/ReadOnlyAccess.java (rev 0)
+++ core/trunk/cache-jbosscache2/src/main/java/org/hibernate/cache/jbc2/entity/ReadOnlyAccess.java 2007-07-24 20:34:07 UTC (rev 12811)
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, v. 2.1. This program is distributed in the
+ * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
+ * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details. You should have received a
+ * copy of the GNU Lesser General Public License, v.2.1 along with this
+ * distribution; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Red Hat Author(s): Steve Ebersole
+ */
+package org.hibernate.cache.jbc2.entity;
+
+import org.jboss.cache.Fqn;
+import org.jboss.cache.lock.TimeoutException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.hibernate.cache.access.EntityRegionAccessStrategy;
+import org.hibernate.cache.access.SoftLock;
+import org.hibernate.cache.EntityRegion;
+import org.hibernate.cache.CacheException;
+
+/**
+ * {@inheritDoc}
+ *
+ * @author Steve Ebersole
+ */
+public class ReadOnlyAccess implements EntityRegionAccessStrategy {
+ private static final Log log = LogFactory.getLog( ReadOnlyAccess.class );
+
+ private final EntityRegionImpl region;
+
+ public ReadOnlyAccess(EntityRegionImpl region) {
+ this.region = region;
+ }
+
+ public EntityRegion getRegion() {
+ return region;
+ }
+
+ public Object get(Object key, long txTimestamp) throws CacheException {
+ try {
+ return region.getCacheInstance().get( region.getRegionFqn(), EntityRegionImpl.ITEM );
+ }
+ catch ( Exception e ) {
+ throw new CacheException( e );
+ }
+ }
+
+ public boolean putFromLoad(
+ Object key,
+ Object value,
+ long txTimestamp,
+ Object version) throws CacheException {
+ try {
+ region.getCacheInstance().putForExternalRead( region.getRegionFqn(), key, value );
+ return true;
+ }
+ catch ( TimeoutException te) {
+ //ignore!
+ log.debug( "ignoring write lock acquisition failure" );
+ return false;
+ }
+ catch ( Throwable t ) {
+ throw new CacheException( t );
+ }
+ }
+
+ public boolean putFromLoad(
+ Object key,
+ Object value,
+ long txTimestamp,
+ Object version,
+ boolean minimalPutOverride) throws CacheException {
+ return putFromLoad( key, value, txTimestamp, version );
+ }
+
+ public SoftLock lockItem(Object key, Object version) throws CacheException {
+ throw new UnsupportedOperationException( "Illegal attempt to lock (edit) read only item" );
+ }
+
+ public SoftLock lockRegion() throws CacheException {
+ throw new UnsupportedOperationException( "Illegal attempt to lock (edit) read only region" );
+ }
+
+ public void unlockItem(Object key, SoftLock lock) throws CacheException {
+ log.error( "Illegal attempt to lock (edit) read only item" );
+ }
+
+ public void unlockRegion(SoftLock lock) throws CacheException {
+ log.error( "Illegal attempt to lock (edit) read only region" );
+ }
+
+ public boolean insert(Object key, Object value, Object version) throws CacheException {
+ try {
+ region.getCacheInstance().put( new Fqn( region.getRegionFqn(), key ), EntityRegionImpl.ITEM, value );
+ }
+ catch (Exception e) {
+ throw new CacheException(e);
+ }
+ return true;
+ }
+
+ public boolean afterInsert(Object key, Object value, Object version) throws CacheException {
+ return false;
+ }
+
+ public boolean update(
+ Object key,
+ Object value,
+ Object currentVersion,
+ Object previousVersion) throws CacheException {
+ throw new UnsupportedOperationException( "Illegal attempt to lock (edit) read only item" );
+ }
+
+ public boolean afterUpdate(
+ Object key,
+ Object value,
+ Object currentVersion,
+ Object previousVersion,
+ SoftLock lock) throws CacheException {
+ throw new UnsupportedOperationException( "Illegal attempt to lock (edit) read only item" );
+ }
+
+ public void remove(Object key) throws CacheException {
+ try {
+ region.getCacheInstance().remove( region.getRegionFqn(), key );
+ }
+ catch ( Exception e ) {
+ throw new CacheException( e );
+ }
+ }
+
+ public void removeAll() throws CacheException {
+ try {
+ region.getCacheInstance().removeNode( region.getRegionFqn() );
+ }
+ catch ( Exception e ) {
+ throw new CacheException( e );
+ }
+ }
+
+ public void evict(Object key) throws CacheException {
+ }
+
+ public void evictAll() throws CacheException {
+ }
+
+ public void destroy() {
+ region.destroy();
+ }
+}
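
ReadOnlyAccess is the strategy meant for entities cached with read-only concurrency: get and putFromLoad go through, while lock and update attempts fail fast or are only logged. A hedged sketch of an annotation-mapped entity that would ask for this strategy once buildAccessStrategy() actually returns it (the entity itself is an assumption):

    import javax.persistence.Entity;
    import javax.persistence.Id;

    import org.hibernate.annotations.Cache;
    import org.hibernate.annotations.CacheConcurrencyStrategy;

    @Entity
    // READ_ONLY concurrency maps to AccessType.READ_ONLY when the entity region is built.
    @Cache(usage = CacheConcurrencyStrategy.READ_ONLY)
    public class Country {
        @Id
        private Long id;

        // Reference data that never changes, so read-only caching is safe.
        private String name;

        // getters/setters omitted in this sketch
    }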
Hibernate SVN: r12810 - trunk/HibernateExt/search.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2007-07-24 06:12:37 -0400 (Tue, 24 Jul 2007)
New Revision: 12810
Modified:
trunk/HibernateExt/search/
Log:
Updated svn ignore list
Property changes on: trunk/HibernateExt/search
___________________________________________________________________
Name: svn:ignore
+ .bin
build
.classpath
.project
dist
target
indextemp
test_output
Hibernate SVN: r12809 - trunk/HibernateExt/search.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2007-07-24 05:53:54 -0400 (Tue, 24 Jul 2007)
New Revision: 12809
Modified:
trunk/HibernateExt/search/build.xml
Log:
Switched forkmode to perTest so that the JMS Slave/Master test both succeed.
Modified: trunk/HibernateExt/search/build.xml
===================================================================
--- trunk/HibernateExt/search/build.xml 2007-07-24 09:52:54 UTC (rev 12808)
+++ trunk/HibernateExt/search/build.xml 2007-07-24 09:53:54 UTC (rev 12809)
@@ -180,7 +180,7 @@
<target name="junit" depends="compiletest, prepare-test-resources">
<mkdir dir="test_output"/>
- <junit forkmode="once" printsummary="yes" haltonfailure="yes">
+ <junit forkmode="perTest" printsummary="yes" haltonfailure="yes">
<classpath>
<path path="${build.testresources.dir}"/>
<!-- dirset dir="${build.testresources.dir}">
Hibernate SVN: r12808 - trunk/HibernateExt/search/src/test/org/hibernate/search/test/session.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2007-07-24 05:52:54 -0400 (Tue, 24 Jul 2007)
New Revision: 12808
Modified:
trunk/HibernateExt/search/src/test/org/hibernate/search/test/session/MassIndexTest.java
Log:
HSEARCH-67.
Import cleanup.
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/session/MassIndexTest.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/session/MassIndexTest.java 2007-07-24 09:52:27 UTC (rev 12807)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/session/MassIndexTest.java 2007-07-24 09:52:54 UTC (rev 12808)
@@ -2,26 +2,18 @@
package org.hibernate.search.test.session;
import java.util.List;
-import java.sql.ResultSet;
-import java.io.File;
import org.hibernate.search.test.SearchTestCase;
import org.hibernate.search.impl.FullTextSessionImpl;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.Environment;
-import org.hibernate.search.store.FSDirectoryProvider;
-import org.hibernate.search.util.ContextHelper;
import org.hibernate.Transaction;
import org.hibernate.Session;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.analysis.StopAnalyzer;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Searcher;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.index.Term;
Hibernate SVN: r12807 - trunk/HibernateExt/search/src/test/org/hibernate/search/test.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2007-07-24 05:52:27 -0400 (Tue, 24 Jul 2007)
New Revision: 12807
Modified:
trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java
Log:
HSEARCH-67.
Setting a mergeFactor to verify that the changes work. Unfortunately, verification is based on visual inspection. There is no unit test yet. Maybe some mocking would help here?
Modified: trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java
===================================================================
--- trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java 2007-07-24 09:51:04 UTC (rev 12806)
+++ trunk/HibernateExt/search/src/test/org/hibernate/search/test/SearchTestCase.java 2007-07-24 09:52:27 UTC (rev 12807)
@@ -40,5 +40,7 @@
protected void configure(org.hibernate.cfg.Configuration cfg) {
cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( "hibernate.search.default.merge_factor", "100" );
+ cfg.setProperty( "hibernate.search.default.max_buffered_docs", "1000" );
}
}
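
The two new properties are Lucene IndexWriter tuning knobs; the HSEARCH-67 changes in r12806 below read them through LuceneIndexingParameters and apply them when the workspace acquires a writer. On the writer itself they correspond roughly to the calls sketched here (values copied from the configure() method above):

    import org.apache.lucene.index.IndexWriter;

    public class IndexWriterTuningSketch {
        // Illustrative only: what hibernate.search.default.merge_factor=100 and
        // hibernate.search.default.max_buffered_docs=1000 end up doing to the writer.
        static void applyTestSettings(IndexWriter writer) {
            writer.setMergeFactor( 100 );
            writer.setMaxBufferedDocs( 1000 );
        }
    }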
Hibernate SVN: r12806 - in trunk/HibernateExt/search/src/java/org/hibernate/search: backend/impl/lucene and 2 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2007-07-24 05:51:04 -0400 (Tue, 24 Jul 2007)
New Revision: 12806
Modified:
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkType.java
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
trunk/HibernateExt/search/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
Log:
HSEARCH-67.
Enabled the WorkType.INDEX and changed all required files to use the Lucene indexing parameters in case this new work type is used.
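
In practical terms this means FullTextSession.index() now enqueues a WorkType.INDEX unit, and the Lucene backend switches the workspace into batch mode when it sees such work. A hedged sketch of the mass re-indexing loop this is aimed at, along the lines of what MassIndexTest exercises (the Email entity and the batch size of 100 are assumptions):

    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;
    import org.hibernate.Transaction;
    import org.hibernate.search.FullTextSession;
    import org.hibernate.search.Search;

    public class MassIndexSketch {
        void reindexAll(Session session) {
            FullTextSession fullTextSession = Search.createFullTextSession( session );
            Transaction tx = fullTextSession.beginTransaction();
            ScrollableResults results = fullTextSession
                    .createCriteria( Email.class )          // assumed indexed entity
                    .scroll( ScrollMode.FORWARD_ONLY );
            int processed = 0;
            while ( results.next() ) {
                fullTextSession.index( results.get( 0 ) );  // queued as WorkType.INDEX work
                if ( ++processed % 100 == 0 ) {
                    fullTextSession.clear();                // keep the session small during the batch
                }
            }
            results.close();
            tx.commit();                                    // the queued index work is applied on commit
        }
    }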
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/LuceneWork.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -9,12 +9,18 @@
* Represent a Serializable Lucene unit work
*
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public abstract class LuceneWork implements Serializable {
//TODO set a serial id
private Document document;
private Class entityClass;
private Serializable id;
+
+ /**
+ * Flag indicating if this lucene work has to be indexed in batch mode.
+ */
+ private boolean batch = false;
public LuceneWork(Serializable id, Class entity) {
this( id, entity, null );
@@ -26,7 +32,14 @@
this.document = document;
}
+ public boolean isBatch() {
+ return batch;
+ }
+ public void setBatch(boolean batch) {
+ this.batch = batch;
+ }
+
public Document getDocument() {
return document;
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkType.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkType.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/WorkType.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -2,11 +2,19 @@
package org.hibernate.search.backend;
/**
+ * Enumeration of the different types of Lucene work. This enumeration is used to specify the type
+ * of index operation to be executed.
+ *
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public enum WorkType {
ADD,
UPDATE,
- DELETE
- //add INDEX at some point to behave differently during the queue process?
+ DELETE,
+
+ /**
+ * This type is used for batch indexing.
+ */
+ INDEX
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/Workspace.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -22,15 +22,24 @@
import org.hibernate.annotations.common.AssertionFailure;
/**
- * Lucene workspace
- * This is not intended to be used in a multithreaded environment
+ * Lucene workspace.
+ * <p>
+ * <b>This is not intended to be used in a multithreaded environment</b>.
* <p/>
- * One cannot execute modification through an IndexReader when an IndexWriter has been acquired on the same underlying directory
- * One cannot get an IndexWriter when an IndexReader have been acquired and modificed on the same underlying directory
- * The recommended approach is to execute all the modifications on the IndexReaders, {@link #clean()} }, and acquire the
- * index writers
+ * <ul>
+ * <li>One cannot execute modification through an IndexReader when an IndexWriter has been acquired
+ * on the same underlying directory
+ * </li>
+ * <li>One cannot get an IndexWriter when an IndexReader has been acquired and modified on the same
+ * underlying directory
+ * </li>
+ * <li>The recommended approach is to execute all the modifications on the IndexReaders, {@link #clean()}, and acquire the
+ * index writers
+ * </li>
+ * </ul>
*
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
//TODO introduce the notion of read only IndexReader? We cannot enforce it because Lucene use abstract classes, not interfaces
public class Workspace {
@@ -41,12 +50,16 @@
private Map<DirectoryProvider, DPStatistics> dpStatistics = new HashMap<DirectoryProvider, DPStatistics>();
private SearchFactoryImplementor searchFactoryImplementor;
+ /**
+ * Flag indicating whether the current work should be executed using the Lucene parameters for batch indexing.
+ */
+ private boolean isBatch = false;
+
public Workspace(SearchFactoryImplementor searchFactoryImplementor) {
this.searchFactoryImplementor = searchFactoryImplementor;
}
-
public DocumentBuilder getDocumentBuilder(Class entity) {
return searchFactoryImplementor.getDocumentBuilders().get( entity );
}
@@ -115,7 +128,22 @@
Analyzer analyzer = entity != null ?
searchFactoryImplementor.getDocumentBuilders().get( entity ).getAnalyzer() :
new SimpleAnalyzer(); //never used
- writer = new IndexWriter( provider.getDirectory(), analyzer, false ); //have been created at init time
+ writer = new IndexWriter( provider.getDirectory(), analyzer, false ); //has been created at init time
+
+ LuceneIndexingParameters indexingParams = searchFactoryImplementor.getIndexingParameters(provider);
+ if(isBatch)
+ {
+ writer.setMergeFactor(indexingParams.getBatchMergeFactor());
+ writer.setMaxMergeDocs(indexingParams.getBatchMaxMergeDocs());
+ writer.setMaxBufferedDocs(indexingParams.getBatchMaxBufferedDocs());
+ }
+ else
+ {
+ writer.setMergeFactor(indexingParams.getMergeFactor());
+ writer.setMaxMergeDocs(indexingParams.getMaxMergeDocs());
+ writer.setMaxBufferedDocs(indexingParams.getMaxBufferedDocs());
+ }
+
writers.put( provider, writer );
}
catch (IOException e) {
@@ -214,4 +242,11 @@
public long operations;
}
+ public boolean isBatch() {
+ return isBatch;
+ }
+
+ public void setBatch(boolean isBatch) {
+ this.isBatch = isBatch;
+ }
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -5,6 +5,8 @@
import java.util.Comparator;
import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
@@ -13,12 +15,18 @@
import org.hibernate.search.store.DirectoryProvider;
/**
- * Apply the operations to Lucene directories
- * avoiding deadlocks
+ * Apply the operations to Lucene directories avoiding deadlocks.
*
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public class LuceneBackendQueueProcessor implements Runnable {
+
+ /**
+ * Class logger.
+ */
+ private static Log log = LogFactory.getLog( LuceneBackendQueueProcessor.class );
+
private List<LuceneWork> queue;
private SearchFactoryImplementor searchFactoryImplementor;
@@ -33,7 +41,8 @@
workspace = new Workspace( searchFactoryImplementor );
worker = new LuceneWorker( workspace );
try {
- deadlockFreeQueue(queue, workspace, searchFactoryImplementor);
+ deadlockFreeQueue(queue, workspace, searchFactoryImplementor);
+ checkForBatchIndexing(workspace);
for ( LuceneWork luceneWork : queue ) {
worker.performWork( luceneWork );
}
@@ -44,6 +53,16 @@
}
}
+ private void checkForBatchIndexing(Workspace workspace) {
+ for ( LuceneWork luceneWork : queue ) {
+ // if there is at least one batch index job we put the workspace into batch indexing mode.
+ if(luceneWork.isBatch()){
+ log.debug("Setting batch indexing mode.");
+ workspace.setBatch(true);
+ }
+ }
+ }
+
/**
* one must lock the directory providers in the exact same order to avoid
* dead lock between concurrent threads or processes
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/backend/impl/lucene/LuceneWorker.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -21,9 +21,10 @@
import org.hibernate.search.engine.DocumentBuilder;
/**
- * Stateless implementation that perform a work
+ * Stateless implementation that performs a unit of work.
*
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public class LuceneWorker {
private Workspace workspace;
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/DocumentBuilder.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -54,6 +54,7 @@
* @author Emmanuel Bernard
* @author Sylvain Vieujot
* @author Richard Hallier
+ * @author Hardy Ferentschik
*/
public class DocumentBuilder<T> {
private static final Log log = LogFactory.getLog( DocumentBuilder.class );
@@ -393,6 +394,15 @@
queue.add( new AddLuceneWork( id, entityClass, doc ) );
searchForContainers = true;
}
+ else if ( workType == WorkType.INDEX ) {
+ Document doc = getDocument( entity, id );
+ queue.add(new DeleteLuceneWork(id, entityClass) );
+ LuceneWork work = new AddLuceneWork( id, entityClass, doc );
+ work.setBatch(true);
+ queue.add(work);
+ searchForContainers = true;
+ }
+
else {
throw new AssertionFailure("Unknown WorkType: " + workType);
}
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -1,4 +1,4 @@
-//$Id: $
+// $Id$
package org.hibernate.search.engine;
import java.util.Map;
@@ -8,10 +8,14 @@
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.optimization.OptimizerStrategy;
import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneIndexingParameters;
import org.hibernate.search.backend.Worker;
/**
+ * Interface which gives access to the different directory providers and their configuration.
+ *
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public interface SearchFactoryImplementor extends SearchFactory {
BackendQueueProcessorFactory getBackendQueueProcessorFactory();
@@ -27,4 +31,8 @@
void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy);
public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider);
+
+ public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider );
+
+ void addIndexingParmeters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams);
}
Property changes on: trunk/HibernateExt/search/src/java/org/hibernate/search/engine/SearchFactoryImplementor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -52,7 +52,7 @@
import org.hibernate.type.Type;
/**
- * Lucene Full text search aware session
+ * Lucene full text search aware session.
*
* @author Emmanuel Bernard
*/
@@ -83,7 +83,7 @@
* Non indexable entities are ignored
* The entity must be associated with the session
*
- * @param entity must not be null
+ * @param entity The entity to index - must not be <code>null</code>.
*/
public void index(Object entity) {
if (entity == null) return;
@@ -94,7 +94,7 @@
DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
if ( builder != null ) {
Serializable id = session.getIdentifier( entity );
- searchFactoryImplementor.getWorker().performWork( entity, id, WorkType.UPDATE, eventSource );
+ searchFactoryImplementor.getWorker().performWork( entity, id, WorkType.INDEX, eventSource );
}
//TODO
//need to add elements in a queue kept at the Session level
Modified: trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2007-07-24 09:48:22 UTC (rev 12805)
+++ trunk/HibernateExt/search/src/java/org/hibernate/search/impl/SearchFactoryImpl.java 2007-07-24 09:51:04 UTC (rev 12806)
@@ -1,13 +1,13 @@
//$Id$
package org.hibernate.search.impl;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
-import java.util.List;
-import java.util.ArrayList;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.analysis.Analyzer;
@@ -22,10 +22,11 @@
import org.hibernate.search.Version;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneIndexingParameters;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
import org.hibernate.search.backend.Worker;
import org.hibernate.search.backend.WorkerFactory;
-import org.hibernate.search.backend.LuceneWork;
-import org.hibernate.search.backend.OptimizeLuceneWork;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.reader.ReaderProvider;
@@ -36,6 +37,7 @@
import org.hibernate.util.ReflectHelper;
/**
+ *
* @author Emmanuel Bernard
*/
public class SearchFactoryImpl implements SearchFactoryImplementor {
@@ -45,7 +47,7 @@
static {
Version.touch();
}
-
+
private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>();
//keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
private Map<DirectoryProvider, ReentrantLock> lockableDirectoryProviders =
@@ -55,7 +57,15 @@
private Worker worker;
private ReaderProvider readerProvider;
private BackendQueueProcessorFactory backendQueueProcessorFactory;
+
+
+ /**
+ * Each directory provider (index) can have its own performance settings.
+ */
+ private Map<DirectoryProvider, LuceneIndexingParameters> dirProviderIndexingParams =
+ new HashMap<DirectoryProvider, LuceneIndexingParameters>();
+
public BackendQueueProcessorFactory getBackendQueueProcessorFactory() {
return backendQueueProcessorFactory;
}
@@ -64,63 +74,16 @@
this.backendQueueProcessorFactory = backendQueueProcessorFactory;
}
+ @SuppressWarnings( "unchecked" )
public SearchFactoryImpl(Configuration cfg) {
//yuk
ReflectionManager reflectionManager = getReflectionManager( cfg );
- Class analyzerClass;
- String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
- if ( analyzerClassName != null ) {
- try {
- analyzerClass = ReflectHelper.classForName( analyzerClassName );
- }
- catch (Exception e) {
- throw new SearchException(
- "Lucene analyzer class '" + analyzerClassName + "' defined in property '" + Environment.ANALYZER_CLASS + "' could not be found.",
- e
- );
- }
- }
- else {
- analyzerClass = StandardAnalyzer.class;
- }
- // Initialize analyzer
- Analyzer defaultAnalyzer;
- try {
- defaultAnalyzer = (Analyzer) analyzerClass.newInstance();
- }
- catch (ClassCastException e) {
- throw new SearchException(
- "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClassName, e
- );
- }
- catch (Exception e) {
- throw new SearchException( "Failed to instantiate lucene analyzer with type " + analyzerClassName, e );
- }
-
- Iterator iter = cfg.getClassMappings();
- DirectoryProviderFactory factory = new DirectoryProviderFactory();
- while ( iter.hasNext() ) {
- PersistentClass clazz = (PersistentClass) iter.next();
- Class<?> mappedClass = clazz.getMappedClass();
- if ( mappedClass != null ) {
- XClass mappedXClass = reflectionManager.toXClass( mappedClass );
- if ( mappedXClass != null && mappedXClass.isAnnotationPresent( Indexed.class ) ) {
- DirectoryProvider provider = factory.createDirectoryProvider( mappedXClass, cfg, this );
- //TODO move that into DirectoryProviderFactory
- if ( !lockableDirectoryProviders.containsKey( provider ) ) {
- lockableDirectoryProviders.put( provider, new ReentrantLock() );
- }
- final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(
- mappedXClass, defaultAnalyzer, provider, reflectionManager
- );
-
- documentBuilders.put( mappedClass, documentBuilder );
- }
- }
- }
+ Analyzer analyzer = initAnalyzer(cfg);
+ initDocumentBuilders(cfg, reflectionManager, analyzer);
+
Set<Class> indexedClasses = documentBuilders.keySet();
- for (DocumentBuilder builder : documentBuilders.values()) {
+ for (DocumentBuilder builder : documentBuilders.values()) {
builder.postInitialize( indexedClasses );
}
worker = WorkerFactory.createWorker( cfg, this );
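As a reminder of what ends up in the documentBuilders map initialised above: only mapped classes carrying the @Indexed annotation are picked up (see initDocumentBuilders further down in this diff). A minimal example of such an entity, illustrative only and not part of this commit, with made-up class and field names:

// Example entity (hypothetical names); javax.persistence and
// org.hibernate.search.annotations imports omitted for brevity.
@Entity
@Indexed
public class Book {
    @Id @GeneratedValue
    @DocumentId
    private Integer id;

    @Field( index = Index.TOKENIZED )
    private String title;

    // getters and setters omitted
}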
@@ -161,10 +124,18 @@
public void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy) {
dirProviderOptimizerStrategies.put( provider, optimizerStrategy );
}
+
+ public void addIndexingParameters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams) {
+ dirProviderIndexingParams.put( provider, indexingParams );
+ }
public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider) {
return dirProviderOptimizerStrategies.get( provider );
}
+
+ public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider) {
+ return dirProviderIndexingParams.get( provider );
+ }
public ReaderProvider getReaderProvider() {
return readerProvider;
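For context, a minimal usage sketch of the new accessor (hypothetical caller, not part of this commit; the helper name and the searchFactory/provider variables are made up, and it assumes the accessor is also exposed on SearchFactoryImplementor, as the implementation above suggests):

// Hypothetical helper in a Lucene backend: look up the per-index performance
// settings registered for a DirectoryProvider before touching its IndexWriter.
private void applyIndexingParameters(SearchFactoryImplementor searchFactory, DirectoryProvider<?> provider) {
    LuceneIndexingParameters params = searchFactory.getIndexingParameters( provider );
    if ( params != null ) {
        // apply settings such as merge factor or max buffered docs to the IndexWriter
        // used for this provider (exact setters depend on the LuceneIndexingParameters API)
    }
}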
@@ -207,4 +178,60 @@
queue.add( new OptimizeLuceneWork( entityType ) );
getBackendQueueProcessorFactory().getProcessor( queue ).run();
}
+
+ private void initDocumentBuilders(Configuration cfg, ReflectionManager reflectionManager, Analyzer analyzer) {
+ Iterator iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+ while (iter.hasNext()) {
+ PersistentClass clazz = (PersistentClass) iter.next();
+ Class<?> mappedClass = clazz.getMappedClass();
+ if (mappedClass != null) {
+ XClass mappedXClass = reflectionManager.toXClass(mappedClass);
+ if (mappedXClass != null && mappedXClass.isAnnotationPresent(Indexed.class)) {
+ DirectoryProvider provider = factory.createDirectoryProvider(mappedXClass, cfg, this);
+ // TODO move that into DirectoryProviderFactory
+ if (!lockableDirectoryProviders.containsKey(provider)) {
+ lockableDirectoryProviders.put(provider, new ReentrantLock());
+ }
+ final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>(mappedXClass, analyzer,
+ provider, reflectionManager);
+
+ documentBuilders.put(mappedClass, documentBuilder);
+ }
+ }
+ }
+ }
+
+ /**
+ * Initialises the Lucene analyzer to use by reading the analyzer class from the configuration and instantiating it.
+ *
+ * @param cfg
+ * The current configuration.
+ * @return The Lucene analyzer to use for tokenisation.
+ */
+ private Analyzer initAnalyzer(Configuration cfg) {
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty(Environment.ANALYZER_CLASS);
+ if (analyzerClassName != null) {
+ try {
+ analyzerClass = ReflectHelper.classForName(analyzerClassName);
+ } catch (Exception e) {
+ throw new SearchException("Lucene analyzer class '" + analyzerClassName + "' defined in property '"
+ + Environment.ANALYZER_CLASS + "' could not be found.", e);
+ }
+ } else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ // Initialize analyzer
+ Analyzer defaultAnalyzer;
+ try {
+ defaultAnalyzer = (Analyzer) analyzerClass.newInstance();
+ } catch (ClassCastException e) {
+ throw new SearchException("Lucene analyzer does not implement " + Analyzer.class.getName() + ": "
+ + analyzerClassName, e);
+ } catch (Exception e) {
+ throw new SearchException("Failed to instantiate lucene analyzer with type " + analyzerClassName, e);
+ }
+ return defaultAnalyzer;
+ }
}
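To round off the initAnalyzer() extraction, a minimal configuration sketch (example values only, not part of this commit): the property key is the Environment.ANALYZER_CLASS constant read above, and StandardAnalyzer remains the default when it is absent.

// Example only: select a custom Lucene analyzer for the whole factory.
Configuration cfg = new Configuration();
cfg.setProperty( Environment.ANALYZER_CLASS, "org.apache.lucene.analysis.SimpleAnalyzer" );
// ... add the annotated entity classes to cfg here ...
SearchFactoryImpl searchFactory = new SearchFactoryImpl( cfg );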