Hibernate SVN: r19008 - search/branches/v3_1_1_GA_CP.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-16 06:48:52 -0400 (Tue, 16 Mar 2010)
New Revision: 19008
Modified:
search/branches/v3_1_1_GA_CP/pom.xml
Log:
JBPAPP-3922 add new DB profile - oracle11gR2RAC
Modified: search/branches/v3_1_1_GA_CP/pom.xml
===================================================================
--- search/branches/v3_1_1_GA_CP/pom.xml 2010-03-16 10:47:58 UTC (rev 19007)
+++ search/branches/v3_1_1_GA_CP/pom.xml 2010-03-16 10:48:52 UTC (rev 19008)
@@ -774,6 +774,25 @@
<jdbc.isolation>4096</jdbc.isolation>
</properties>
</profile>
+ <profile>
+ <id>oracle11gR2RAC</id>
+ <dependencies>
+ <dependency>
+ <groupId>com.oracle</groupId>
+ <artifactId>ojdbc6</artifactId>
+ <version>11.2.0.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <properties>
+ <db.dialect>org.hibernate.dialect.Oracle10gDialect</db.dialect>
+ <jdbc.driver>oracle.jdbc.driver.OracleDriver</jdbc.driver>
+ <jdbc.url>jdbc:oracle:thin:@(DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=vmg27-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=vmg28-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=qarac.jboss)))</jdbc.url>
+ <jdbc.user>hibbr330</jdbc.user>
+ <jdbc.pass>hibbr330</jdbc.pass>
+ <jdbc.isolation/>
+ </properties>
+ </profile>
<!-- ================================ -->
<!-- Dependecy profiles to test w and -->
<!-- w/o optional dependencies -->
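[Editor's note, not part of the commit] The new oracle11gR2RAC profile is activated like any other Maven profile (for example, `mvn test -Poracle11gR2RAC`); it adds the ojdbc6 test dependency and points the test suite at the RAC service through the thin-driver TNS descriptor in jdbc.url. The sketch below is a minimal, hypothetical standalone check of that URL, assuming ojdbc6 is on the classpath and the lab hosts are reachable; the class name and main method are illustrative only and do not appear in the commit.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class RacConnectionCheck {
    // Same load-balanced TNS descriptor the profile configures for the test suite.
    private static final String RAC_URL =
        "jdbc:oracle:thin:@(DESCRIPTION=(LOAD_BALANCE=on)"
        + "(ADDRESS=(PROTOCOL=TCP)(HOST=vmg27-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))"
        + "(ADDRESS=(PROTOCOL=TCP)(HOST=vmg28-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))"
        + "(CONNECT_DATA=(SERVICE_NAME=qarac.jboss)))";

    public static void main(String[] args) throws SQLException {
        // ojdbc6 is a JDBC 4.0 driver, so it self-registers with DriverManager;
        // loading oracle.jdbc.driver.OracleDriver explicitly is not required.
        try (Connection con = DriverManager.getConnection(RAC_URL, "hibbr330", "hibbr330")) {
            System.out.println("Connected to: " + con.getMetaData().getURL());
        }
    }
}
```

The same profile block is applied to the entitymanager, annotations, and core branches in the commits that follow, so one activation flag covers the whole test matrix.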
Hibernate SVN: r19007 - entitymanager/branches/v3_4_0_GA_CP.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-16 06:47:58 -0400 (Tue, 16 Mar 2010)
New Revision: 19007
Modified:
entitymanager/branches/v3_4_0_GA_CP/pom.xml
Log:
JBPAPP-3922 add new DB profile - oracle11gR2RAC
Modified: entitymanager/branches/v3_4_0_GA_CP/pom.xml
===================================================================
--- entitymanager/branches/v3_4_0_GA_CP/pom.xml 2010-03-16 10:47:18 UTC (rev 19006)
+++ entitymanager/branches/v3_4_0_GA_CP/pom.xml 2010-03-16 10:47:58 UTC (rev 19007)
@@ -865,6 +865,25 @@
<jdbc.isolation>4096</jdbc.isolation>
</properties>
</profile>
+ <profile>
+ <id>oracle11gR2RAC</id>
+ <dependencies>
+ <dependency>
+ <groupId>com.oracle</groupId>
+ <artifactId>ojdbc6</artifactId>
+ <version>11.2.0.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <properties>
+ <db.dialect>org.hibernate.dialect.Oracle10gDialect</db.dialect>
+ <jdbc.driver>oracle.jdbc.driver.OracleDriver</jdbc.driver>
+ <jdbc.url>jdbc:oracle:thin:@(DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=vmg27-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=vmg28-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=qarac.jboss)))</jdbc.url>
+ <jdbc.user>hibbr330</jdbc.user>
+ <jdbc.pass>hibbr330</jdbc.pass>
+ <jdbc.isolation/>
+ </properties>
+ </profile>
</profiles>
<properties>
<slf4jVersion>1.5.8</slf4jVersion>
Hibernate SVN: r19006 - annotations/branches/v3_4_0_GA_CP.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-16 06:47:18 -0400 (Tue, 16 Mar 2010)
New Revision: 19006
Modified:
annotations/branches/v3_4_0_GA_CP/pom.xml
Log:
JBPAPP-3922 add new DB profile - oracle11gR2RAC
Modified: annotations/branches/v3_4_0_GA_CP/pom.xml
===================================================================
--- annotations/branches/v3_4_0_GA_CP/pom.xml 2010-03-16 10:46:08 UTC (rev 19005)
+++ annotations/branches/v3_4_0_GA_CP/pom.xml 2010-03-16 10:47:18 UTC (rev 19006)
@@ -865,6 +865,25 @@
<jdbc.isolation>4096</jdbc.isolation>
</properties>
</profile>
+ <profile>
+ <id>oracle11gR2RAC</id>
+ <dependencies>
+ <dependency>
+ <groupId>com.oracle</groupId>
+ <artifactId>ojdbc6</artifactId>
+ <version>11.2.0.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <properties>
+ <db.dialect>org.hibernate.dialect.Oracle10gDialect</db.dialect>
+ <jdbc.driver>oracle.jdbc.driver.OracleDriver</jdbc.driver>
+ <jdbc.url>jdbc:oracle:thin:@(DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=vmg27-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=vmg28-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=qarac.jboss)))</jdbc.url>
+ <jdbc.user>hibbr330</jdbc.user>
+ <jdbc.pass>hibbr330</jdbc.pass>
+ <jdbc.isolation/>
+ </properties>
+ </profile>
</profiles>
<properties>
<slf4jVersion>1.5.8</slf4jVersion>
Hibernate SVN: r19005 - core/branches/Branch_3_3_2_GA_CP/parent.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-16 06:46:08 -0400 (Tue, 16 Mar 2010)
New Revision: 19005
Modified:
core/branches/Branch_3_3_2_GA_CP/parent/pom.xml
Log:
JBPAPP-3922 add new DB profile - oracle11gR2RAC
Modified: core/branches/Branch_3_3_2_GA_CP/parent/pom.xml
===================================================================
--- core/branches/Branch_3_3_2_GA_CP/parent/pom.xml 2010-03-16 05:18:43 UTC (rev 19004)
+++ core/branches/Branch_3_3_2_GA_CP/parent/pom.xml 2010-03-16 10:46:08 UTC (rev 19005)
@@ -667,6 +667,25 @@
<jdbc.isolation/>
</properties>
</profile>
+ <profile>
+ <id>oracle11gR2RAC</id>
+ <dependencies>
+ <dependency>
+ <groupId>com.oracle</groupId>
+ <artifactId>ojdbc6</artifactId>
+ <version>11.2.0.1.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <properties>
+ <db.dialect>org.hibernate.dialect.Oracle10gDialect</db.dialect>
+ <jdbc.driver>oracle.jdbc.driver.OracleDriver</jdbc.driver>
+ <jdbc.url>jdbc:oracle:thin:@(DESCRIPTION=(LOAD_BALANCE=on)(ADDRESS=(PROTOCOL=TCP)(HOST=vmg27-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(ADDRESS=(PROTOCOL=TCP)(HOST=vmg28-vip.mw.lab.eng.bos.redhat.com)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=qarac.jboss)))</jdbc.url>
+ <jdbc.user>hibbr330</jdbc.user>
+ <jdbc.pass>hibbr330</jdbc.pass>
+ <jdbc.isolation/>
+ </properties>
+ </profile>
<!-- The Sybase 15 test envionment -->
<profile>
<id>sybase15</id>
Hibernate SVN: r19004 - core/trunk/testsuite/src/test/java/org/hibernate/test/typeparameters.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-16 01:18:43 -0400 (Tue, 16 Mar 2010)
New Revision: 19004
Modified:
core/trunk/testsuite/src/test/java/org/hibernate/test/typeparameters/TypeParameterTest.java
Log:
HHH-5013 the previous select query should not hold locks in TypeParameterTest#testSave
Modified: core/trunk/testsuite/src/test/java/org/hibernate/test/typeparameters/TypeParameterTest.java
===================================================================
--- core/trunk/testsuite/src/test/java/org/hibernate/test/typeparameters/TypeParameterTest.java 2010-03-16 01:42:26 UTC (rev 19003)
+++ core/trunk/testsuite/src/test/java/org/hibernate/test/typeparameters/TypeParameterTest.java 2010-03-16 05:18:43 UTC (rev 19004)
@@ -60,9 +60,10 @@
assertEquals("Non-Default value should not be changed", resultSet.getInt("VALUE_THREE"), 5);
assertTrue("Default value should have been mapped to null", resultSet.getObject("VALUE_FOUR") == null);
- deleteData();
+
t.commit();
s.close();
+ deleteData();
}
public void testLoading() throws Exception {
@@ -83,9 +84,10 @@
assertEquals("Default value incorrectly loaded", obj.getValueThree(), -1);
assertEquals("Default value incorrectly loaded", obj.getValueFour(), -5);
- deleteData();
+
t.commit();
s.close();
+ deleteData();
}
private void initData() throws Exception {
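[Editor's note, not part of the commit] The change moves deleteData() after the commit and session close, so the SELECT that backed the assertions no longer holds row locks when the cleanup opens its own session. A minimal sketch of the resulting ordering, assuming the testsuite's usual openSession() helper and that deleteData() manages its own session and transaction (both assumptions; only t.commit(), s.close(), and deleteData() are visible in the diff):

```java
public void testSave() throws Exception {
    Session s = openSession();
    Transaction t = s.beginTransaction();

    // ... persist the entity and run the JDBC-level assertions ...

    // Commit and close first so the assertion query releases any locks it acquired.
    t.commit();
    s.close();

    // Cleanup now runs in a fresh session/transaction without contending for those locks.
    deleteData();
}
```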
Hibernate SVN: r19003 - in core/trunk/documentation/manual/src/main/docbook/zh-CN: fallback_content and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: xhuang@jboss.com
Date: 2010-03-15 21:42:26 -0400 (Mon, 15 Mar 2010)
New Revision: 19003
Modified:
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/best_practices.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/example_parentchild.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/persistent_classes.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/portability.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/preface.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_criteria.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_hql.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_sql.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/session_api.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/toolset_guide.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/transactions.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/tutorial.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/content/xml.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Conventions.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Feedback.po
core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Legal_Notice.po
Log:
update
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/best_practices.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/best_practices.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/best_practices.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:15\n"
-"PO-Revision-Date: 2009-11-06 10:05+1000\n"
+"PO-Revision-Date: 2010-03-16 10:01+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,24 +21,13 @@
#. Tag: term
#, no-c-format
-msgid ""
-"Write fine-grained classes and map them using <literal><component></"
-"literal>:"
-msgstr ""
-"设计细颗粒度的持久类并且使用 <literal><component></literal> 来实现映"
-"射:"
+msgid "Write fine-grained classes and map them using <literal><component></literal>:"
+msgstr "设计细颗粒度的持久类并且使用 <literal><component></literal> 来实现映射:"
#. Tag: para
#, no-c-format
-msgid ""
-"Use an <literal>Address</literal> class to encapsulate <literal>street</"
-"literal>, <literal>suburb</literal>, <literal>state</literal>, "
-"<literal>postcode</literal>. This encourages code reuse and simplifies "
-"refactoring."
-msgstr ""
-"使用一个 <literal>Address</literal> 持久类来封装 <literal>street</literal>,"
-"<literal>suburb</literal>,<literal>state</literal>,<literal>postcode</"
-"literal>。 这将有利于代码重用和简化代码重构(refactoring)的工作。"
+msgid "Use an <literal>Address</literal> class to encapsulate <literal>street</literal>, <literal>suburb</literal>, <literal>state</literal>, <literal>postcode</literal>. This encourages code reuse and simplifies refactoring."
+msgstr "使用一个 <literal>Address</literal> 持久类来封装 <literal>street</literal>,<literal>suburb</literal>,<literal>state</literal>,<literal>postcode</literal>。 这将有利于代码重用和简化代码重构(refactoring)的工作。"
#. Tag: term
#, no-c-format
@@ -47,13 +36,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate makes identifier properties optional. There are a range of reasons "
-"why you should use them. We recommend that identifiers be 'synthetic', that "
-"is, generated with no business meaning."
-msgstr ""
-"Hibernate 中标识符属性是可选的,不过有很多原因来说明你应该使用标识符属性。我"
-"们建议标识符应该是“人造”的(自动生成,不涉及业务含义)。 "
+msgid "Hibernate makes identifier properties optional. There are a range of reasons why you should use them. We recommend that identifiers be 'synthetic', that is, generated with no business meaning."
+msgstr "Hibernate 中标识符属性是可选的,不过有很多原因来说明你应该使用标识符属性。我们建议标识符应该是“人造”的(自动生成,不涉及业务含义)。 "
#. Tag: term
#, no-c-format
@@ -62,15 +46,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Identify natural keys for all entities, and map them using <literal><"
-"natural-id></literal>. Implement <literal>equals()</literal> and "
-"<literal>hashCode()</literal> to compare the properties that make up the "
-"natural key."
-msgstr ""
-"对所有的实体都标识出自然键,用 <literal><natural-id></literal> 进行映"
-"射。实现 <literal>equals()</literal> 和 <literal>hashCode()</literal>,在其中"
-"用组成自然键的属性进行比较。"
+msgid "Identify natural keys for all entities, and map them using <literal><natural-id></literal>. Implement <literal>equals()</literal> and <literal>hashCode()</literal> to compare the properties that make up the natural key."
+msgstr "对所有的实体都标识出自然键,用 <literal><natural-id></literal> 进行映射。实现 <literal>equals()</literal> 和 <literal>hashCode()</literal>,在其中用组成自然键的属性进行比较。"
#. Tag: term
#, no-c-format
@@ -79,14 +56,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Do not use a single monolithic mapping document. Map <literal>com.eg.Foo</"
-"literal> in the file <literal>com/eg/Foo.hbm.xml</literal>. This makes "
-"sense, particularly in a team environment."
-msgstr ""
-"不要把所有的持久类映射都写到一个大文件中。把 <literal>com.eg.Foo</literal> 映"
-"射到 <literal>com/eg/Foo.hbm.xml</literal> 中。在团队开发环境中,这一点尤其重"
-"要。"
+msgid "Do not use a single monolithic mapping document. Map <literal>com.eg.Foo</literal> in the file <literal>com/eg/Foo.hbm.xml</literal>. This makes sense, particularly in a team environment."
+msgstr "不要把所有的持久类映射都写到一个大文件中。把 <literal>com.eg.Foo</literal> 映射到 <literal>com/eg/Foo.hbm.xml</literal> 中。在团队开发环境中,这一点尤其重要。"
#. Tag: term
#, no-c-format
@@ -105,13 +76,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This is recommended if your queries call non-ANSI-standard SQL functions. "
-"Externalizing the query strings to mapping files will make the application "
-"more portable."
-msgstr ""
-"如果你的查询中调用了非 ANSI 标准的 SQL 函数,那么这条实践经验对你适用。把查询"
-"字符串放在映射文件中可以让程序具有更好的可移植性。 "
+msgid "This is recommended if your queries call non-ANSI-standard SQL functions. Externalizing the query strings to mapping files will make the application more portable."
+msgstr "如果你的查询中调用了非 ANSI 标准的 SQL 函数,那么这条实践经验对你适用。把查询字符串放在映射文件中可以让程序具有更好的可移植性。 "
#. Tag: term
#, no-c-format
@@ -120,13 +86,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"As in JDBC, always replace non-constant values by \"?\". Do not use string "
-"manipulation to bind a non-constant value in a query. You should also "
-"consider using named parameters in queries."
-msgstr ""
-"就像在 JDBC 编程中一样,应该总是用占位符 \"?\" 来替换非常量值,不要在查询中用"
-"字符串值来构造非常量值。你也应该考虑在查询中使用命名参数。"
+msgid "As in JDBC, always replace non-constant values by \"?\". Do not use string manipulation to bind a non-constant value in a query. You should also consider using named parameters in queries."
+msgstr "就像在 JDBC 编程中一样,应该总是用占位符 \"?\" 来替换非常量值,不要在查询中用字符串值来构造非常量值。你也应该考虑在查询中使用命名参数。"
#. Tag: term
#, no-c-format
@@ -135,15 +96,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate allows the application to manage JDBC connections, but his "
-"approach should be considered a last-resort. If you cannot use the built-in "
-"connection providers, consider providing your own implementation of "
-"<literal>org.hibernate.connection.ConnectionProvider</literal>."
-msgstr ""
-"Hibernate 允许应用程序自己来管理 JDBC 连接,但是应该作为最后没有办法的办法。"
-"如果你不能使用 Hibernate 内建的 connections providers,那么考虑实现自己来实"
-"现 <literal>org.hibernate.connection.ConnectionProvider</literal>。"
+msgid "Hibernate allows the application to manage JDBC connections, but his approach should be considered a last-resort. If you cannot use the built-in connection providers, consider providing your own implementation of <literal>org.hibernate.connection.ConnectionProvider</literal>."
+msgstr "Hibernate 允许应用程序自己来管理 JDBC 连接,但是应该作为最后没有办法的办法。如果你不能使用 Hibernate 内建的 connections providers,那么考虑实现自己来实现 <literal>org.hibernate.connection.ConnectionProvider</literal>。"
#. Tag: term
#, no-c-format
@@ -152,17 +106,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Suppose you have a Java type from a library that needs to be persisted but "
-"does not provide the accessors needed to map it as a component. You should "
-"consider implementing <literal>org.hibernate.UserType</literal>. This "
-"approach frees the application code from implementing transformations to/"
-"from a Hibernate type."
-msgstr ""
-"假设你有一个 Java 类型,来自某些类库,需要被持久化,但是该类没有提供映射操作"
-"需要的存取方法。那么你应该考虑实现 <literal>org.hibernate.UserType</literal> "
-"接口。这种办法使程序代码写起来更加自如,不再需要考虑类与 Hibernate type 之间"
-"的相互转换。 "
+msgid "Suppose you have a Java type from a library that needs to be persisted but does not provide the accessors needed to map it as a component. You should consider implementing <literal>org.hibernate.UserType</literal>. This approach frees the application code from implementing transformations to/from a Hibernate type."
+msgstr "假设你有一个 Java 类型,来自某些类库,需要被持久化,但是该类没有提供映射操作需要的存取方法。那么你应该考虑实现 <literal>org.hibernate.UserType</literal> 接口。这种办法使程序代码写起来更加自如,不再需要考虑类与 Hibernate type 之间的相互转换。 "
#. Tag: term
#, no-c-format
@@ -170,22 +115,9 @@
msgstr "在性能瓶颈的地方使用硬编码的 JDBC:"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"In performance-critical areas of the system, some kinds of operations might "
-"benefit from direct JDBC. Do not assume, however, that JDBC is necessarily "
-"faster. Please wait until you <emphasis>know</emphasis> something is a "
-"bottleneck. If you need to use direct JDBC, you can open a Hibernate "
-"<literal>Session</literal>, wrap your JDBC operation as a <literal>org."
-"hibernate.jdbc.Work</literal> object and using that JDBC connection. This "
-"way you can still use the same transaction strategy and underlying "
-"connection provider."
-msgstr ""
-"在系统中对性能要求很严格的一些部分,某些操作也许直接使用 JDBC 会更好。但是请"
-"先<emphasis>确认</emphasis>这的确是一个瓶颈,并且不要想当然认为 JDBC 一定会更"
-"快。如果确实需要直接使用 JDBC,那么最好打开一个 Hibernate <literal>Session</"
-"literal> 然后从 <literal>Session</literal> 获得 connection,按照这种办法你仍"
-"然可以使用同样的 transaction 策略和底层的 connection provider。 "
+#, no-c-format
+msgid "In performance-critical areas of the system, some kinds of operations might benefit from direct JDBC. Do not assume, however, that JDBC is necessarily faster. Please wait until you <emphasis>know</emphasis> something is a bottleneck. If you need to use direct JDBC, you can open a Hibernate <literal>Session</literal>, wrap your JDBC operation as a <literal>org.hibernate.jdbc.Work</literal> object and using that JDBC connection. This way you can still use the same transaction strategy and underlying connection provider."
+msgstr "在系统中对性能要求很严格的一些部分,某些操作也许直接使用 JDBC 会更好。但是请先<emphasis>确认</emphasis>这的确是一个瓶颈,并且不要想当然认为 JDBC 一定会更快。如果确实需要直接使用 JDBC,那么最好打开一个 Hibernate <literal>Session</literal> 然后将 JDBC 操作包裹为 <literal>org.hibernate.jdbc.Work</literal> 并使用 JDBC 连接。按照这种办法你仍然可以使用同样的 transaction 策略和底层的 connection provider。 "
#. Tag: term
#, no-c-format
@@ -194,16 +126,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Sometimes the Session synchronizes its persistent state with the database. "
-"Performance will be affected if this process occurs too often. You can "
-"sometimes minimize unnecessary flushing by disabling automatic flushing, or "
-"even by changing the order of queries and other operations within a "
-"particular transaction."
-msgstr ""
-"Session 会不时的向数据库同步持久化状态,如果这种操作进行的过于频繁,性能会受"
-"到一定的影响。有时候你可以通过禁止自动 flushing,尽量最小化非必要的 flushing "
-"操作,或者更进一步,在一个特定的 transaction 中改变查询和其它操作的顺序。 "
+msgid "Sometimes the Session synchronizes its persistent state with the database. Performance will be affected if this process occurs too often. You can sometimes minimize unnecessary flushing by disabling automatic flushing, or even by changing the order of queries and other operations within a particular transaction."
+msgstr "Session 会不时的向数据库同步持久化状态,如果这种操作进行的过于频繁,性能会受到一定的影响。有时候你可以通过禁止自动 flushing,尽量最小化非必要的 flushing 操作,或者更进一步,在一个特定的 transaction 中改变查询和其它操作的顺序。 "
#. Tag: term
#, no-c-format
@@ -212,17 +136,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"When using a servlet/session bean architecture, you can pass persistent "
-"objects loaded in the session bean to and from the servlet/JSP layer. Use a "
-"new session to service each request. Use <literal>Session.merge()</literal> "
-"or <literal>Session.saveOrUpdate()</literal> to synchronize objects with the "
-"database."
-msgstr ""
-"当使用一个 servlet / session bean 类型的架构的时候, 你可以把已加载的持久对象"
-"在 session bean 层和 servlet / JSP 层之间来回传递。使用新的 session 来为每个"
-"请求服务,使用 <literal>Session.merge()</literal> 或者 <literal>Session."
-"saveOrUpdate()</literal> 来与数据库同步。 "
+msgid "When using a servlet/session bean architecture, you can pass persistent objects loaded in the session bean to and from the servlet/JSP layer. Use a new session to service each request. Use <literal>Session.merge()</literal> or <literal>Session.saveOrUpdate()</literal> to synchronize objects with the database."
+msgstr "当使用一个 servlet / session bean 类型的架构的时候, 你可以把已加载的持久对象在 session bean 层和 servlet / JSP 层之间来回传递。使用新的 session 来为每个请求服务,使用 <literal>Session.merge()</literal> 或者 <literal>Session.saveOrUpdate()</literal> 来与数据库同步。 "
#. Tag: term
#, no-c-format
@@ -231,27 +146,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Database Transactions have to be as short as possible for best scalability. "
-"However, it is often necessary to implement long running "
-"<emphasis>application transactions</emphasis>, a single unit-of-work from "
-"the point of view of a user. An application transaction might span several "
-"client request/response cycles. It is common to use detached objects to "
-"implement application transactions. An appropriate alternative in a two "
-"tiered architecture, is to maintain a single open persistence contact "
-"session for the whole life cycle of the application transaction. Then simply "
-"disconnect from the JDBC connection at the end of each request and reconnect "
-"at the beginning of the subsequent request. Never share a single session "
-"across more than one application transaction or you will be working with "
-"stale data."
-msgstr ""
-"为了得到最佳的可伸缩性,数据库事务(Database Transaction)应该尽可能的短。但"
-"是,程序常常需要实现长时间运行的<emphasis>“应用程序事务(Application "
-"Transaction)”</emphasis>,包含一个从用户的观点来看的原子操作。这个应用程序事"
-"务可能跨越多次从用户请求到得到反馈的循环。用脱管对象(与 session 脱离的对象)"
-"来实现应用程序事务是常见的。或者,尤其在两层结构中,把 Hibernate Session 从 "
-"JDBC 连接中脱离开,下次需要用的时候再连接上。绝不要把一个 Session 用在多个应"
-"用程序事务(Application Transaction)中,否则你的数据可能会过期失效。"
+msgid "Database Transactions have to be as short as possible for best scalability. However, it is often necessary to implement long running <emphasis>application transactions</emphasis>, a single unit-of-work from the point of view of a user. An application transaction might span several client request/response cycles. It is common to use detached objects to implement application transactions. An appropriate alternative in a two tiered architecture, is to maintain a single open persistence contact session for the whole life cycle of the application transaction. Then simply disconnect from the JDBC connection at the end of each request and reconnect at the beginning of the subsequent request. Never share a single session across more than one application transaction or you will be working with stale data."
+msgstr "为了得到最佳的可伸缩性,数据库事务(Database Transaction)应该尽可能的短。但是,程序常常需要实现长时间运行的<emphasis>“应用程序事务(Application Transaction)”</emphasis>,包含一个从用户的观点来看的原子操作。这个应用程序事务可能跨越多次从用户请求到得到反馈的循环。用脱管对象(与 session 脱离的对象)来实现应用程序事务是常见的。或者,尤其在两层结构中,把 Hibernate Session 从 JDBC 连接中脱离开,下次需要用的时候再连接上。绝不要把一个 Session 用在多个应用程序事务(Application Transaction)中,否则你的数据可能会过期失效。"
#. Tag: term
#, no-c-format
@@ -260,20 +156,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This is more of a necessary practice than a \"best\" practice. When an "
-"exception occurs, roll back the <literal>Transaction</literal> and close the "
-"<literal>Session</literal>. If you do not do this, Hibernate cannot "
-"guarantee that in-memory state accurately represents the persistent state. "
-"For example, do not use <literal>Session.load()</literal> to determine if an "
-"instance with the given identifier exists on the database; use "
-"<literal>Session.get()</literal> or a query instead."
-msgstr ""
-"这一点甚至比“最佳实践”还要重要,这是“必备常识”。当异常发生的时候,必须要回滚 "
-"<literal>Transaction</literal> ,关闭 <literal>Session</literal>。如果你不这"
-"样做的话,Hibernate 无法保证内存状态精确的反应持久状态。尤其不要使用 "
-"<literal>Session.load()</literal> 来判断一个给定标识符的对象实例在数据库中是"
-"否存在,应该使用 <literal>Session.get()</literal> 或者进行一次查询。"
+msgid "This is more of a necessary practice than a \"best\" practice. When an exception occurs, roll back the <literal>Transaction</literal> and close the <literal>Session</literal>. If you do not do this, Hibernate cannot guarantee that in-memory state accurately represents the persistent state. For example, do not use <literal>Session.load()</literal> to determine if an instance with the given identifier exists on the database; use <literal>Session.get()</literal> or a query instead."
+msgstr "这一点甚至比“最佳实践”还要重要,这是“必备常识”。当异常发生的时候,必须要回滚 <literal>Transaction</literal> ,关闭 <literal>Session</literal>。如果你不这样做的话,Hibernate 无法保证内存状态精确的反应持久状态。尤其不要使用 <literal>Session.load()</literal> 来判断一个给定标识符的对象实例在数据库中是否存在,应该使用 <literal>Session.get()</literal> 或者进行一次查询。"
#. Tag: term
#, no-c-format
@@ -282,55 +166,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Use eager fetching sparingly. Use proxies and lazy collections for most "
-"associations to classes that are not likely to be completely held in the "
-"second-level cache. For associations to cached classes, where there is an a "
-"extremely high probability of a cache hit, explicitly disable eager fetching "
-"using <literal>lazy=\"false\"</literal>. When join fetching is appropriate "
-"to a particular use case, use a query with a <literal>left join fetch</"
-"literal>."
-msgstr ""
-"谨慎的使用主动抓取(eager fetching)。对于关联来说,若其目标是无法在第二级缓"
-"存中完全缓存所有实例的类,应该使用代理(proxies)与/或具有延迟加载属性的集合"
-"(lazy collections)。若目标是可以被缓存的,尤其是缓存的命中率非常高的情况"
-"下,应该使用 <literal>lazy=\"false\"</literal>,明确的禁止掉 eager fetching。"
-"如果那些特殊的确实适合使用 join fetch 的场合,请在查询中使用 <literal>left "
-"join fetch</literal>。 "
+msgid "Use eager fetching sparingly. Use proxies and lazy collections for most associations to classes that are not likely to be completely held in the second-level cache. For associations to cached classes, where there is an a extremely high probability of a cache hit, explicitly disable eager fetching using <literal>lazy=\"false\"</literal>. When join fetching is appropriate to a particular use case, use a query with a <literal>left join fetch</literal>."
+msgstr "谨慎的使用主动抓取(eager fetching)。对于关联来说,若其目标是无法在第二级缓存中完全缓存所有实例的类,应该使用代理(proxies)与/或具有延迟加载属性的集合(lazy collections)。若目标是可以被缓存的,尤其是缓存的命中率非常高的情况下,应该使用 <literal>lazy=\"false\"</literal>,明确的禁止掉 eager fetching。如果那些特殊的确实适合使用 join fetch 的场合,请在查询中使用 <literal>left join fetch</literal>。 "
#. Tag: term
#, no-c-format
-msgid ""
-"Use the <emphasis>open session in view</emphasis> pattern, or a disciplined "
-"<emphasis>assembly phase</emphasis> to avoid problems with unfetched data:"
-msgstr ""
-"使用 <emphasis>open session in view</emphasis> 模式,或者执行严格的<emphasis>"
-"装配期(assembly phase)</emphasis>策略来避免再次抓取数据带来的问题:"
+msgid "Use the <emphasis>open session in view</emphasis> pattern, or a disciplined <emphasis>assembly phase</emphasis> to avoid problems with unfetched data:"
+msgstr "使用 <emphasis>open session in view</emphasis> 模式,或者执行严格的<emphasis>装配期(assembly phase)</emphasis>策略来避免再次抓取数据带来的问题:"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate frees the developer from writing tedious <emphasis>Data Transfer "
-"Objects</emphasis> (DTO). In a traditional EJB architecture, DTOs serve dual "
-"purposes: first, they work around the problem that entity beans are not "
-"serializable; second, they implicitly define an assembly phase where all "
-"data to be used by the view is fetched and marshalled into the DTOs before "
-"returning control to the presentation tier. Hibernate eliminates the first "
-"purpose. Unless you are prepared to hold the persistence context (the "
-"session) open across the view rendering process, you will still need an "
-"assembly phase. Think of your business methods as having a strict contract "
-"with the presentation tier about what data is available in the detached "
-"objects. This is not a limitation of Hibernate. It is a fundamental "
-"requirement of safe transactional data access."
-msgstr ""
-"Hibernate 让开发者们摆脱了繁琐的 <emphasis>Data Transfer Objects</emphasis>"
-"(DTO)。在传统的 EJB 结构中,DTO 有双重作用:首先,他们解决了 entity bean 无"
-"法序列化的问题;其次,他们隐含地定义了一个装配期,在此期间,所有在 view 层需"
-"要用到的数据,都被抓取、集中到了 DTO 中,然后控制才被装到表示层。Hibernate 终"
-"结了第一个作用。然而,除非你做好了在整个渲染过程中都维护一个打开的持久化上下"
-"文(session)的准备,你仍然需要一个装配期(想象一下,你的业务方法与你的表示层"
-"有严格的契约,数据总是被放置到脱管对象中)。这并非是 Hibernate 的限制,这是实"
-"现安全的事务化数据访问的基本需求。"
+msgid "Hibernate frees the developer from writing tedious <emphasis>Data Transfer Objects</emphasis> (DTO). In a traditional EJB architecture, DTOs serve dual purposes: first, they work around the problem that entity beans are not serializable; second, they implicitly define an assembly phase where all data to be used by the view is fetched and marshalled into the DTOs before returning control to the presentation tier. Hibernate eliminates the first purpose. Unless you are prepared to hold the persistence context (the session) open across the view rendering process, you will still need an assembly phase. Think of your business methods as having a strict contract with the presentation tier about what data is available in the detached objects. This is not a limitation of Hibernate. It is a fundamental requirement of safe transactional data access."
+msgstr "Hibernate 让开发者们摆脱了繁琐的 <emphasis>Data Transfer Objects</emphasis>(DTO)。在传统的 EJB 结构中,DTO 有双重作用:首先,他们解决了 entity bean 无法序列化的问题;其次,他们隐含地定义了一个装配期,在此期间,所有在 view 层需要用到的数据,都被抓取、集中到了 DTO 中,然后控制才被装到表示层。Hibernate 终结了第一个作用。然而,除非你做好了在整个渲染过程中都维护一个打开的持久化上下文(session)的准备,你仍然需要一个装配期(想象一下,你的业务方法与你的表示层有严格的契约,数据总是被放置到脱管对象中)。这并非是 Hibernate 的限制,这是实现安全的事务化数据访问的基本需求。"
#. Tag: term
#, no-c-format
@@ -339,19 +186,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hide Hibernate data-access code behind an interface. Combine the "
-"<emphasis>DAO</emphasis> and <emphasis>Thread Local Session</emphasis> "
-"patterns. You can even have some classes persisted by handcoded JDBC "
-"associated to Hibernate via a <literal>UserType</literal>. This advice is, "
-"however, intended for \"sufficiently large\" applications. It is not "
-"appropriate for an application with five tables."
-msgstr ""
-"把 Hibernate 的数据存取代码隐藏到接口(interface)的后面,组合使用 "
-"<emphasis>DAO</emphasis> 和 <emphasis>Thread Local Session</emphasis> 模式。"
-"通过 Hibernate 的<literal>UserType</literal>,你甚至可以用硬编码的 JDBC 来持"
-"久化那些本该被 Hibernate 持久化的类。然而,该建议更适用于规模足够大应用软件"
-"中,对于那些只有 5 张表的应用程序并不适合。"
+msgid "Hide Hibernate data-access code behind an interface. Combine the <emphasis>DAO</emphasis> and <emphasis>Thread Local Session</emphasis> patterns. You can even have some classes persisted by handcoded JDBC associated to Hibernate via a <literal>UserType</literal>. This advice is, however, intended for \"sufficiently large\" applications. It is not appropriate for an application with five tables."
+msgstr "把 Hibernate 的数据存取代码隐藏到接口(interface)的后面,组合使用 <emphasis>DAO</emphasis> 和 <emphasis>Thread Local Session</emphasis> 模式。通过 Hibernate 的<literal>UserType</literal>,你甚至可以用硬编码的 JDBC 来持久化那些本该被 Hibernate 持久化的类。然而,该建议更适用于规模足够大应用软件中,对于那些只有 5 张表的应用程序并不适合。"
#. Tag: term
#, no-c-format
@@ -360,17 +196,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Practical test cases for real many-to-many associations are rare. Most of "
-"the time you need additional information stored in the \"link table\". In "
-"this case, it is much better to use two one-to-many associations to an "
-"intermediate link class. In fact, most associations are one-to-many and many-"
-"to-one. For this reason, you should proceed cautiously when using any other "
-"association style."
-msgstr ""
-"多对多连接用得好的例子实际上相当少见。大多数时候你在“连接表”中需要保存额外的"
-"信息。这种情况下,用两个指向中介类的一对多的连接比较好。实际上,我们认为绝大"
-"多数的连接是一对多和多对一的。i因此,你应该谨慎使用其它连接风格。"
+msgid "Practical test cases for real many-to-many associations are rare. Most of the time you need additional information stored in the \"link table\". In this case, it is much better to use two one-to-many associations to an intermediate link class. In fact, most associations are one-to-many and many-to-one. For this reason, you should proceed cautiously when using any other association style."
+msgstr "多对多连接用得好的例子实际上相当少见。大多数时候你在“连接表”中需要保存额外的信息。这种情况下,用两个指向中介类的一对多的连接比较好。实际上,我们认为绝大多数的连接是一对多和多对一的。i因此,你应该谨慎使用其它连接风格。"
#. Tag: term
#, no-c-format
@@ -379,9 +206,6 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Unidirectional associations are more difficult to query. In a large "
-"application, almost all associations must be navigable in both directions in "
-"queries."
-msgstr ""
-"单向关联更加难于查询。在大型应用中,几乎所有的关联必须在查询中可以双向导航。"
+msgid "Unidirectional associations are more difficult to query. In a large application, almost all associations must be navigable in both directions in queries."
+msgstr "单向关联更加难于查询。在大型应用中,几乎所有的关联必须在查询中可以双向导航。"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/example_parentchild.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/example_parentchild.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/example_parentchild.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-10T07:25:35\n"
-"PO-Revision-Date: 2009-11-27 13:54+1000\n"
+"PO-Revision-Date: 2010-03-16 10:04+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,31 +21,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"One of the first things that new users want to do with Hibernate is to model "
-"a parent/child type relationship. There are two different approaches to "
-"this. The most convenient approach, especially for new users, is to model "
-"both <literal>Parent</literal> and <literal>Child</literal> as entity "
-"classes with a <literal><one-to-many></literal> association from "
-"<literal>Parent</literal> to <literal>Child</literal>. The alternative "
-"approach is to declare the <literal>Child</literal> as a <literal><"
-"composite-element></literal>. The default semantics of a one-to-many "
-"association in Hibernate are much less close to the usual semantics of a "
-"parent/child relationship than those of a composite element mapping. We will "
-"explain how to use a <emphasis>bidirectional one-to-many association with "
-"cascades</emphasis> to model a parent/child relationship efficiently and "
-"elegantly."
-msgstr ""
-"刚刚接触 Hibernate 的人大多是从父子关系(parent / child type relationship)的"
-"建模入手的。父子关系的建模有两种方法。由于种种原因,最方便的方法是把 "
-"<literal>Parent</literal> 和 <literal>Child</literal> 都建模成实体类,并创建"
-"一个从 <literal>Parent</literal> 指向 <literal>Child</literal> 的 <one-to-"
-"many> 关联,对新手来说尤其如此。还有一种方法,就是将 <literal>Child</"
-"literal> 声明为一个 <literal><composite-element></literal>(组合元"
-"素)。 事实上在 Hibernate 中 one to many 关联的默认语义远没有 composite "
-"element 贴近 parent / child 关系的通常语义。下面我们会阐述如何使用<emphasis>"
-"带有级联的双向一对多关联(idirectional one to many association with "
-"cascades)</emphasis>去建立有效、优美的 parent / child 关系。"
+msgid "One of the first things that new users want to do with Hibernate is to model a parent/child type relationship. There are two different approaches to this. The most convenient approach, especially for new users, is to model both <literal>Parent</literal> and <literal>Child</literal> as entity classes with a <literal><one-to-many></literal> association from <literal>Parent</literal> to <literal>Child</literal>. The alternative approach is to declare the <literal>Child</literal> as a <literal><composite-element></literal>. The default semantics of a one-to-many association in Hibernate are much less close to the usual semantics of a parent/child relationship than those of a composite element mapping. We will explain how to use a <emphasis>bidirectional one-to-many association with cascades</emphasis> to model a parent/child relationship efficiently and elegantly."
+msgstr "刚刚接触 Hibernate 的人大多是从父子关系(parent / child type relationship)的建模入手的。父子关系的建模有两种方法。由于种种原因,最方便的方法是把 <literal>Parent</literal> 和 <literal>Child</literal> 都建模成实体类,并创建一个从 <literal>Parent</literal> 指向 <literal>Child</literal> 的 <one-to-many> 关联,对新手来说尤其如此。还有一种方法,就是将 <literal>Child</literal> 声明为一个 <literal><composite-element></literal>(组合元素)。 事实上在 Hibernate 中 one to many 关联的默认语义远没有 composite element 贴近 parent / child 关系的通常语义。下面我们会阐述如何使用<emphasis>带有级联的双向一对多关联(idirectional one to many association with cascades)</emphasis>去建立有效、优美的 parent / child 关系。"
#. Tag: title
#, no-c-format
@@ -54,62 +31,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate collections are considered to be a logical part of their owning "
-"entity and not of the contained entities. Be aware that this is a critical "
-"distinction that has the following consequences:"
-msgstr ""
-"Hibernate collections 被当作其所属实体而不是其包含实体的一个逻辑部分。这非常"
-"重要,它主要体现为以下几点:"
+msgid "Hibernate collections are considered to be a logical part of their owning entity and not of the contained entities. Be aware that this is a critical distinction that has the following consequences:"
+msgstr "Hibernate collections 被当作其所属实体而不是其包含实体的一个逻辑部分。这非常重要,它主要体现为以下几点:"
#. Tag: para
#, no-c-format
-msgid ""
-"When you remove/add an object from/to a collection, the version number of "
-"the collection owner is incremented."
-msgstr ""
-"当删除或增加 collection 中对象的时候,collection 所属者的版本值会递增。 "
+msgid "When you remove/add an object from/to a collection, the version number of the collection owner is incremented."
+msgstr "当删除或增加 collection 中对象的时候,collection 所属者的版本值会递增。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If an object that was removed from a collection is an instance of a value "
-"type (e.g. a composite element), that object will cease to be persistent and "
-"its state will be completely removed from the database. Likewise, adding a "
-"value type instance to the collection will cause its state to be immediately "
-"persistent."
-msgstr ""
-"如果一个从 collection 中移除的对象是一个值类型(value type)的实例,比如 "
-"composite element,那么这个对象的持久化状态将会终止,其在数据库中对应的记录会"
-"被删除。同样的,向 collection 增加一个 value type 的实例将会使之立即被持久"
-"化。 "
+msgid "If an object that was removed from a collection is an instance of a value type (e.g. a composite element), that object will cease to be persistent and its state will be completely removed from the database. Likewise, adding a value type instance to the collection will cause its state to be immediately persistent."
+msgstr "如果一个从 collection 中移除的对象是一个值类型(value type)的实例,比如 composite element,那么这个对象的持久化状态将会终止,其在数据库中对应的记录会被删除。同样的,向 collection 增加一个 value type 的实例将会使之立即被持久化。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Conversely, if an entity is removed from a collection (a one-to-many or many-"
-"to-many association), it will not be deleted by default. This behavior is "
-"completely consistent; a change to the internal state of another entity "
-"should not cause the associated entity to vanish. Likewise, adding an entity "
-"to a collection does not cause that entity to become persistent, by default."
-msgstr ""
-"另一方面,如果从一对多或多对多关联的 collection 中移除一个实体,在缺省情况下"
-"这个对象并不会被删除。这个行为是完全合乎逻辑的--改变一个实体的内部状态不应"
-"该使与它关联的实体消失掉。同样的,向 collection 增加一个实体不会使之被持久"
-"化。 "
+msgid "Conversely, if an entity is removed from a collection (a one-to-many or many-to-many association), it will not be deleted by default. This behavior is completely consistent; a change to the internal state of another entity should not cause the associated entity to vanish. Likewise, adding an entity to a collection does not cause that entity to become persistent, by default."
+msgstr "另一方面,如果从一对多或多对多关联的 collection 中移除一个实体,在缺省情况下这个对象并不会被删除。这个行为是完全合乎逻辑的--改变一个实体的内部状态不应该使与它关联的实体消失掉。同样的,向 collection 增加一个实体不会使之被持久化。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Adding an entity to a collection, by default, merely creates a link between "
-"the two entities. Removing the entity will remove the link. This is "
-"appropriate for all sorts of cases. However, it is not appropriate in the "
-"case of a parent/child relationship. In this case, the life of the child is "
-"bound to the life cycle of the parent."
-msgstr ""
-"实际上,向 Collection 增加一个实体的缺省动作只是在两个实体之间创建一个连接而"
-"已,同样移除的时候也只是删除连接。这种处理对于所有的情况都是合适的。对于父子"
-"关系则是完全不适合的,在这种关系下,子对象的生存绑定于父对象的生存周期。 "
+msgid "Adding an entity to a collection, by default, merely creates a link between the two entities. Removing the entity will remove the link. This is appropriate for all sorts of cases. However, it is not appropriate in the case of a parent/child relationship. In this case, the life of the child is bound to the life cycle of the parent."
+msgstr "实际上,向 Collection 增加一个实体的缺省动作只是在两个实体之间创建一个连接而已,同样移除的时候也只是删除连接。这种处理对于所有的情况都是合适的。对于父子关系则是完全不适合的,在这种关系下,子对象的生存绑定于父对象的生存周期。 "
#. Tag: title
#, no-c-format
@@ -118,11 +61,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Suppose we start with a simple <literal><one-to-many></literal> "
-"association from <literal>Parent</literal> to <literal>Child</literal>."
-msgstr ""
-"假设我们要实现一个简单的从 Parent 到 Child 的 <one-to-many> 关联。"
+msgid "Suppose we start with a simple <literal><one-to-many></literal> association from <literal>Parent</literal> to <literal>Child</literal>."
+msgstr "假设我们要实现一个简单的从 Parent 到 Child 的 <one-to-many> 关联。"
#. Tag: para
#, no-c-format
@@ -136,31 +76,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"an <literal>INSERT</literal> to create the record for <literal>c</literal>"
-msgstr ""
-"一条 <literal>INSERT</literal> 语句,为 <literal>c</literal> 创建一条记录"
+msgid "an <literal>INSERT</literal> to create the record for <literal>c</literal>"
+msgstr "一条 <literal>INSERT</literal> 语句,为 <literal>c</literal> 创建一条记录"
#. Tag: para
#, no-c-format
-msgid ""
-"an <literal>UPDATE</literal> to create the link from <literal>p</literal> to "
-"<literal>c</literal>"
-msgstr ""
-"一条 <literal>UPDATE</literal> 语句,创建从 <literal>p</literal> 到 "
-"<literal>c</literal> 的连接"
+msgid "an <literal>UPDATE</literal> to create the link from <literal>p</literal> to <literal>c</literal>"
+msgstr "一条 <literal>UPDATE</literal> 语句,创建从 <literal>p</literal> 到 <literal>c</literal> 的连接"
#. Tag: para
#, no-c-format
-msgid ""
-"This is not only inefficient, but also violates any <literal>NOT NULL</"
-"literal> constraint on the <literal>parent_id</literal> column. You can fix "
-"the nullability constraint violation by specifying <literal>not-null=\"true"
-"\"</literal> in the collection mapping:"
-msgstr ""
-"这样做不仅效率低,而且违反了 <literal>parent_id</literal> 列 "
-"<literal>parent_id</literal> 非空的限制。我们可以通过在集合类映射上指定 "
-"<literal>not-null=\"true\"</literal> 来解决违反非空约束的问题:"
+msgid "This is not only inefficient, but also violates any <literal>NOT NULL</literal> constraint on the <literal>parent_id</literal> column. You can fix the nullability constraint violation by specifying <literal>not-null=\"true\"</literal> in the collection mapping:"
+msgstr "这样做不仅效率低,而且违反了 <literal>parent_id</literal> 列 <literal>parent_id</literal> 非空的限制。我们可以通过在集合类映射上指定 <literal>not-null=\"true\"</literal> 来解决违反非空约束的问题:"
#. Tag: para
#, no-c-format
@@ -169,35 +96,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The underlying cause of this behavior is that the link (the foreign key "
-"<literal>parent_id</literal>) from <literal>p</literal> to <literal>c</"
-"literal> is not considered part of the state of the <literal>Child</literal> "
-"object and is therefore not created in the <literal>INSERT</literal>. The "
-"solution is to make the link part of the <literal>Child</literal> mapping."
-msgstr ""
-"这种现象的根本原因是从 <literal>p</literal> 到 <literal>c</literal> 的连接"
-"(外键 parent_id)没有被当作 <literal>Child</literal> 对象状态的一部分,因而"
-"没有在 <literal>INSERT</literal> 语句中被创建。因此解决的办法就是把这个连接添"
-"加到 <literal>Child</literal> 的映射中。"
+msgid "The underlying cause of this behavior is that the link (the foreign key <literal>parent_id</literal>) from <literal>p</literal> to <literal>c</literal> is not considered part of the state of the <literal>Child</literal> object and is therefore not created in the <literal>INSERT</literal>. The solution is to make the link part of the <literal>Child</literal> mapping."
+msgstr "这种现象的根本原因是从 <literal>p</literal> 到 <literal>c</literal> 的连接(外键 parent_id)没有被当作 <literal>Child</literal> 对象状态的一部分,因而没有在 <literal>INSERT</literal> 语句中被创建。因此解决的办法就是把这个连接添加到 <literal>Child</literal> 的映射中。"
#. Tag: para
#, no-c-format
-msgid ""
-"You also need to add the <literal>parent</literal> property to the "
-"<literal>Child</literal> class."
-msgstr ""
-"你还需要为类 <literal>Child</literal> 添加 <literal>parent</literal> 属性。"
+msgid "You also need to add the <literal>parent</literal> property to the <literal>Child</literal> class."
+msgstr "你还需要为类 <literal>Child</literal> 添加 <literal>parent</literal> 属性。"
#. Tag: para
#, no-c-format
-msgid ""
-"Now that the <literal>Child</literal> entity is managing the state of the "
-"link, we tell the collection not to update the link. We use the "
-"<literal>inverse</literal> attribute to do this:"
-msgstr ""
-"现在实体 <literal>Child</literal> 在管理连接的状态,为了使 collection 不更新"
-"连接,我们使用 <literal>inverse</literal> 属性:"
+msgid "Now that the <literal>Child</literal> entity is managing the state of the link, we tell the collection not to update the link. We use the <literal>inverse</literal> attribute to do this:"
+msgstr "现在实体 <literal>Child</literal> 在管理连接的状态,为了使 collection 不更新连接,我们使用 <literal>inverse</literal> 属性:"
#. Tag: para
#, no-c-format
@@ -211,12 +121,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You could also create an <literal>addChild()</literal> method of "
-"<literal>Parent</literal>."
-msgstr ""
-"为了让事情变得井井有条,可以为 <literal>Parent</literal> 加一个 "
-"<literal>addChild()</literal> 方法。"
+msgid "You could also create an <literal>addChild()</literal> method of <literal>Parent</literal>."
+msgstr "为了让事情变得井井有条,可以为 <literal>Parent</literal> 加一个 <literal>addChild()</literal> 方法。"
#. Tag: para
#, no-c-format
@@ -230,12 +136,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can address the frustrations of the explicit call to <literal>save()</"
-"literal> by using cascades."
-msgstr ""
-"需要显式调用 <literal>save()</literal> 仍然很麻烦,我们可以用级联来解决这个问"
-"题。 "
+msgid "You can address the frustrations of the explicit call to <literal>save()</literal> by using cascades."
+msgstr "需要显式调用 <literal>save()</literal> 仍然很麻烦,我们可以用级联来解决这个问题。 "
#. Tag: para
#, no-c-format
@@ -244,13 +146,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Similarly, we do not need to iterate over the children when saving or "
-"deleting a <literal>Parent</literal>. The following removes <literal>p</"
-"literal> and all its children from the database."
-msgstr ""
-"同样的,保存或删除 <literal>Parent</literal> 对象的时候并不需要遍历其子对象。"
-"下面的代码会删除对象 <literal>p</literal> 及其所有子对象对应的数据库记录。 "
+msgid "Similarly, we do not need to iterate over the children when saving or deleting a <literal>Parent</literal>. The following removes <literal>p</literal> and all its children from the database."
+msgstr "同样的,保存或删除 <literal>Parent</literal> 对象的时候并不需要遍历其子对象。下面的代码会删除对象 <literal>p</literal> 及其所有子对象对应的数据库记录。 "
#. Tag: para
#, no-c-format
@@ -259,41 +156,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"will not remove <literal>c</literal> from the database. In this case, it "
-"will only remove the link to <literal>p</literal> and cause a <literal>NOT "
-"NULL</literal> constraint violation. You need to explicitly <literal>delete()"
-"</literal> the <literal>Child</literal>."
-msgstr ""
-"不会从数据库删除<literal>c</literal>;它只会删除与 <literal>p</literal> 之间"
-"的连接(并且会导致违反 <literal>NOT NULL</literal> 约束,在这个例子中)。你需"
-"要显式调用 <literal>delete()</literal> 来删除 <literal>Child</literal>。 "
+msgid "will not remove <literal>c</literal> from the database. In this case, it will only remove the link to <literal>p</literal> and cause a <literal>NOT NULL</literal> constraint violation. You need to explicitly <literal>delete()</literal> the <literal>Child</literal>."
+msgstr "不会从数据库删除<literal>c</literal>;它只会删除与 <literal>p</literal> 之间的连接(并且会导致违反 <literal>NOT NULL</literal> 约束,在这个例子中)。你需要显式调用 <literal>delete()</literal> 来删除 <literal>Child</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In our case, a <literal>Child</literal> cannot exist without its parent. So "
-"if we remove a <literal>Child</literal> from the collection, we do want it "
-"to be deleted. To do this, we must use <literal>cascade=\"all-delete-orphan"
-"\"</literal>."
-msgstr ""
-"在我们的例子中,如果没有父对象,子对象就不应该存在,如果将子对象从 "
-"collection 中移除,实际上我们是想删除它。要实现这种要求,就必须使用 "
-"<literal>cascade=\"all-delete-orphan\"</literal>。 "
+msgid "In our case, a <literal>Child</literal> cannot exist without its parent. So if we remove a <literal>Child</literal> from the collection, we do want it to be deleted. To do this, we must use <literal>cascade=\"all-delete-orphan\"</literal>."
+msgstr "在我们的例子中,如果没有父对象,子对象就不应该存在,如果将子对象从 collection 中移除,实际上我们是想删除它。要实现这种要求,就必须使用 <literal>cascade=\"all-delete-orphan\"</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Even though the collection mapping specifies <literal>inverse=\"true\"</"
-"literal>, cascades are still processed by iterating the collection elements. "
-"If you need an object be saved, deleted or updated by cascade, you must add "
-"it to the collection. It is not enough to simply call <literal>setParent()</"
-"literal>."
-msgstr ""
-"注意:即使在 collection 一方的映射中指定 <literal>inverse=\"true\"</"
-"literal>,级联仍然是通过遍历 collection 中的元素来处理的。如果你想要通过级联"
-"进行子对象的插入、删除、更新操作,就必须把它加到 collection 中,只调用 "
-"<literal>setParent()</literal> 是不够的。 "
+msgid "Even though the collection mapping specifies <literal>inverse=\"true\"</literal>, cascades are still processed by iterating the collection elements. If you need an object be saved, deleted or updated by cascade, you must add it to the collection. It is not enough to simply call <literal>setParent()</literal>."
+msgstr "注意:即使在 collection 一方的映射中指定 <literal>inverse=\"true\"</literal>,级联仍然是通过遍历 collection 中的元素来处理的。如果你想要通过级联进行子对象的插入、删除、更新操作,就必须把它加到 collection 中,只调用 <literal>setParent()</literal> 是不够的。 "
#. Tag: title
#, no-c-format
@@ -301,57 +175,19 @@
msgstr "级联与未保存值(<literal>unsaved-value</literal>)"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Suppose we loaded up a <literal>Parent</literal> in one <literal>Session</"
-"literal>, made some changes in a UI action and wanted to persist these "
-"changes in a new session by calling <literal>update()</literal>. The "
-"<literal>Parent</literal> will contain a collection of children and, since "
-"the cascading update is enabled, Hibernate needs to know which children are "
-"newly instantiated and which represent existing rows in the database. We "
-"will also assume that both <literal>Parent</literal> and <literal>Child</"
-"literal> have generated identifier properties of type <literal>Long</"
-"literal>. Hibernate will use the identifier and version/timestamp property "
-"value to determine which of the children are new. (See <xref linkend="
-"\"objectstate-saveorupdate\" />.) <emphasis>In Hibernate3, it is no longer "
-"necessary to specify an <literal>unsaved-value</literal> explicitly.</"
-"emphasis>"
-msgstr ""
-"假设我们从 <literal>Session</literal> 中装入了一个 <literal>Parent</literal> "
-"对象,用户界面对其进行了修改,然后希望在一个新的 Session 里面调用 "
-"<literal>update()</literal> 来保存这些修改。对象 <literal>Parent</literal> 包"
-"含了子对象的集合,由于打开了级联更新,Hibernate 需要知道哪些 Child 对象是新实"
-"例化的,哪些代表数据库中已经存在的记录。我们假设 <literal>Parent</literal> "
-"和 <literal>Child</literal> 对象的标识属性都是自动生成的,类型为 "
-"<literal>java.lang.Long</literal>。Hibernate 会使用标识属性的值,和 version "
-"或 timestamp 属性,来判断哪些子对象是新的。(参见 <xref linkend="
-"\"objectstate-saveorupdate\"/>)<emphasis>在 Hibernate3 中,显式指定 "
-"<literal>unsaved-value</literal> 不再是必须的了。</emphasis>"
+#, no-c-format
+msgid "Suppose we loaded up a <literal>Parent</literal> in one <literal>Session</literal>, made some changes in a UI action and wanted to persist these changes in a new session by calling <literal>update()</literal>. The <literal>Parent</literal> will contain a collection of children and, since the cascading update is enabled, Hibernate needs to know which children are newly instantiated and which represent existing rows in the database. We will also assume that both <literal>Parent</literal> and <literal>Child</literal> have generated identifier properties of type <literal>Long</literal>. Hibernate will use the identifier and version/timestamp property value to determine which of the children are new. (See <xref linkend=\"objectstate-saveorupdate\" />.) <emphasis>In Hibernate3, it is no longer necessary to specify an <literal>unsaved-value</literal> explicitly.</emphasis>"
+msgstr "假设我们从 <literal>Session</literal> 中装入了一个 <literal>Parent</literal> 对象,用户界面对其进行了修改,然后希望在一个新的 Session 里面调用 <literal>update()</literal> 来保存这些修改。对象 <literal>Parent</literal> 包含了子对象的集合,由于打开了级联更新,Hibernate 需要知道哪些 Child 对象是新实例化的,哪些代表数据库中已经存在的记录。我们假设 <literal>Parent</literal> 和 <literal>Child</literal> 对象的标识属性都是自动生成的,类型为 <literal>Long</literal>。Hibernate 会使用标识属性的值,和 version 或 timestamp 属性,来判断哪些子对象是新的。(参见 <xref linkend=\"objectstate-saveorupdate\"/>)<emphasis>在 Hibernate3 中,显式指定 <literal>unsaved-value</literal> 不再是必须的了。</emphasis> "
#. Tag: para
#, no-c-format
-msgid ""
-"The following code will update <literal>parent</literal> and <literal>child</"
-"literal> and insert <literal>newChild</literal>:"
-msgstr ""
-"下面的代码会更新 <literal>parent</literal> 和 <literal>child</literal> 对象,"
-"并且插入 <literal>newChild</literal> 对象。 "
+msgid "The following code will update <literal>parent</literal> and <literal>child</literal> and insert <literal>newChild</literal>:"
+msgstr "下面的代码会更新 <literal>parent</literal> 和 <literal>child</literal> 对象,并且插入 <literal>newChild</literal> 对象。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This may be suitable for the case of a generated identifier, but what about "
-"assigned identifiers and composite identifiers? This is more difficult, "
-"since Hibernate cannot use the identifier property to distinguish between a "
-"newly instantiated object, with an identifier assigned by the user, and an "
-"object loaded in a previous session. In this case, Hibernate will either use "
-"the timestamp or version property, or will actually query the second-level "
-"cache or, worst case, the database, to see if the row exists."
-msgstr ""
-"这对于自动生成标识的情况是非常好的,但是自分配的标识和复合标识怎么办呢?这是"
-"有点麻烦,因为 Hibernate 没有办法区分新实例化的对象(标识被用户指定了)和前一"
-"个 Session 装入的对象。在这种情况下,Hibernate 会使用 timestamp 或 version 属"
-"性,或者查询第二级缓存,或者最坏的情况,查询数据库,来确认是否此行存在。 "
+msgid "This may be suitable for the case of a generated identifier, but what about assigned identifiers and composite identifiers? This is more difficult, since Hibernate cannot use the identifier property to distinguish between a newly instantiated object, with an identifier assigned by the user, and an object loaded in a previous session. In this case, Hibernate will either use the timestamp or version property, or will actually query the second-level cache or, worst case, the database, to see if the row exists."
+msgstr "这对于自动生成标识的情况是非常好的,但是自分配的标识和复合标识怎么办呢?这是有点麻烦,因为 Hibernate 没有办法区分新实例化的对象(标识被用户指定了)和前一个 Session 装入的对象。在这种情况下,Hibernate 会使用 timestamp 或 version 属性,或者查询第二级缓存,或者最坏的情况,查询数据库,来确认是否此行存在。 "
#. Tag: title
#, no-c-format
@@ -360,28 +196,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The sections we have just covered can be a bit confusing. However, in "
-"practice, it all works out nicely. Most Hibernate applications use the "
-"parent/child pattern in many places."
-msgstr ""
-"这里有不少东西需要融会贯通,可能会让新手感到迷惑。但是在实践中它们都工作地非"
-"常好。大部分 Hibernate 应用程序都会经常用到父子对象模式。 "
+msgid "The sections we have just covered can be a bit confusing. However, in practice, it all works out nicely. Most Hibernate applications use the parent/child pattern in many places."
+msgstr "这里有不少东西需要融会贯通,可能会让新手感到迷惑。但是在实践中它们都工作地非常好。大部分 Hibernate 应用程序都会经常用到父子对象模式。 "
#. Tag: para
#, no-c-format
-msgid ""
-"We mentioned an alternative in the first paragraph. None of the above issues "
-"exist in the case of <literal><composite-element></literal> mappings, "
-"which have exactly the semantics of a parent/child relationship. "
-"Unfortunately, there are two big limitations with composite element classes: "
-"composite elements cannot own collections and they should not be the child "
-"of any entity other than the unique parent."
-msgstr ""
-"在第一段中我们曾经提到另一个方案。上面的这些问题都不会出现在 <literal><"
-"composite-element></literal> 映射中,它准确地表达了父子关系的语义。很不幸"
-"复合元素还有两个重大限制:复合元素不能拥有 collections,并且,除了用于惟一的"
-"父对象外,它们不能再作为其它任何实体的子对象。 "
+msgid "We mentioned an alternative in the first paragraph. None of the above issues exist in the case of <literal><composite-element></literal> mappings, which have exactly the semantics of a parent/child relationship. Unfortunately, there are two big limitations with composite element classes: composite elements cannot own collections and they should not be the child of any entity other than the unique parent."
+msgstr "在第一段中我们曾经提到另一个方案。上面的这些问题都不会出现在 <literal><composite-element></literal> 映射中,它准确地表达了父子关系的语义。很不幸复合元素还有两个重大限制:复合元素不能拥有 collections,并且,除了用于惟一的父对象外,它们不能再作为其它任何实体的子对象。 "
#~ msgid ""
#~ "<![CDATA[<set name=\"children\">\n"
@@ -393,7 +214,6 @@
#~ " <key column=\"parent_id\"/>\n"
#~ " <one-to-many class=\"Child\"/>\n"
#~ "</set>]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = .....;\n"
#~ "Child c = new Child();\n"
@@ -406,7 +226,6 @@
#~ "p.getChildren().add(c);\n"
#~ "session.save(c);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[<set name=\"children\">\n"
#~ " <key column=\"parent_id\" not-null=\"true\"/>\n"
@@ -417,14 +236,12 @@
#~ " <key column=\"parent_id\" not-null=\"true\"/>\n"
#~ " <one-to-many class=\"Child\"/>\n"
#~ "</set>]]>"
-
#~ msgid ""
#~ "<![CDATA[<many-to-one name=\"parent\" column=\"parent_id\" not-null=\"true"
#~ "\"/>]]>"
#~ msgstr ""
#~ "<![CDATA[<many-to-one name=\"parent\" column=\"parent_id\" not-null=\"true"
#~ "\"/>]]>"
-
#~ msgid ""
#~ "<![CDATA[<set name=\"children\" inverse=\"true\">\n"
#~ " <key column=\"parent_id\"/>\n"
@@ -435,7 +252,6 @@
#~ " <key column=\"parent_id\"/>\n"
#~ " <one-to-many class=\"Child\"/>\n"
#~ "</set>]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "Child c = new Child();\n"
@@ -450,7 +266,6 @@
#~ "p.getChildren().add(c);\n"
#~ "session.save(c);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[public void addChild(Child c) {\n"
#~ " c.setParent(this);\n"
@@ -461,7 +276,6 @@
#~ " c.setParent(this);\n"
#~ " children.add(c);\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "Child c = new Child();\n"
@@ -474,7 +288,6 @@
#~ "p.addChild(c);\n"
#~ "session.save(c);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[<set name=\"children\" inverse=\"true\" cascade=\"all\">\n"
#~ " <key column=\"parent_id\"/>\n"
@@ -485,7 +298,6 @@
#~ " <key column=\"parent_id\"/>\n"
#~ " <one-to-many class=\"Child\"/>\n"
#~ "</set>]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "Child c = new Child();\n"
@@ -496,7 +308,6 @@
#~ "Child c = new Child();\n"
#~ "p.addChild(c);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "session.delete(p);\n"
@@ -505,7 +316,6 @@
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "session.delete(p);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "Child c = (Child) p.getChildren().iterator().next();\n"
@@ -518,7 +328,6 @@
#~ "p.getChildren().remove(c);\n"
#~ "c.setParent(null);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[Parent p = (Parent) session.load(Parent.class, pid);\n"
#~ "Child c = (Child) p.getChildren().iterator().next();\n"
@@ -531,7 +340,6 @@
#~ "p.getChildren().remove(c);\n"
#~ "session.delete(c);\n"
#~ "session.flush();]]>"
-
#~ msgid ""
#~ "<![CDATA[<set name=\"children\" inverse=\"true\" cascade=\"all-delete-"
#~ "orphan\">\n"
@@ -544,7 +352,6 @@
#~ " <key column=\"parent_id\"/>\n"
#~ " <one-to-many class=\"Child\"/>\n"
#~ "</set>]]>"
-
#~ msgid ""
#~ "<![CDATA[//parent and child were both loaded in a previous session\n"
#~ "parent.addChild(child);\n"
@@ -559,3 +366,4 @@
#~ "parent.addChild(newChild);\n"
#~ "session.update(parent);\n"
#~ "session.flush();]]>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/persistent_classes.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/persistent_classes.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/persistent_classes.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -7,7 +7,7 @@
"Project-Id-Version: persistent_classes\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:15\n"
-"PO-Revision-Date: 2009-12-07 09:36+1000\n"
+"PO-Revision-Date: 2010-03-16 10:00+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -22,30 +22,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Persistent classes are classes in an application that implement the entities "
-"of the business problem (e.g. Customer and Order in an E-commerce "
-"application). Not all instances of a persistent class are considered to be "
-"in the persistent state. For example, an instance can instead be transient "
-"or detached."
-msgstr ""
-"在应用程序中,用来实现业务问题实体的(如,在电子商务应用程序中的 Customer 和 "
-"Order)类就是持久化类。不能认为所有的持久化类的实例都是持久的状态 - 一个实例"
-"的状态也可能是瞬时的或脱管的。"
+msgid "Persistent classes are classes in an application that implement the entities of the business problem (e.g. Customer and Order in an E-commerce application). Not all instances of a persistent class are considered to be in the persistent state. For example, an instance can instead be transient or detached."
+msgstr "在应用程序中,用来实现业务问题实体的(如,在电子商务应用程序中的 Customer 和 Order)类就是持久化类。不能认为所有的持久化类的实例都是持久的状态 - 一个实例的状态也可能是瞬时的或脱管的。"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate works best if these classes follow some simple rules, also known "
-"as the Plain Old Java Object (POJO) programming model. However, none of "
-"these rules are hard requirements. Indeed, Hibernate3 assumes very little "
-"about the nature of your persistent objects. You can express a domain model "
-"in other ways (using trees of <literal>Map</literal> instances, for example)."
-msgstr ""
-"如果这些持久化类遵循一些简单的规则,Hibernate 能够工作得更好,这些规则也被称"
-"作简单传统 Java 对象(POJO:Plain Old Java Object)编程模型。但是这些规则并不"
-"是必需的。 实际上,Hibernate3 对于你的持久化类几乎不做任何设想。你可以用其他"
-"的方法来表达领域模型:比如,使用 <literal>Map</literal> 实例的树型结构。"
+msgid "Hibernate works best if these classes follow some simple rules, also known as the Plain Old Java Object (POJO) programming model. However, none of these rules are hard requirements. Indeed, Hibernate3 assumes very little about the nature of your persistent objects. You can express a domain model in other ways (using trees of <literal>Map</literal> instances, for example)."
+msgstr "如果这些持久化类遵循一些简单的规则,Hibernate 能够工作得更好,这些规则也被称作简单传统 Java 对象(POJO:Plain Old Java Object)编程模型。但是这些规则并不是必需的。 实际上,Hibernate3 对于你的持久化类几乎不做任何设想。你可以用其他的方法来表达领域模型:比如,使用 <literal>Map</literal> 实例的树型结构。"
#. Tag: title
#, no-c-format
@@ -54,16 +37,12 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Most Java applications require a persistent class representing felines. For "
-"example:"
+msgid "Most Java applications require a persistent class representing felines. For example:"
msgstr "大多数 Java 程序需要用一个持久化类来表示猫科动物。例如:"
#. Tag: para
#, no-c-format
-msgid ""
-"The four main rules of persistent classes are explored in more detail in the "
-"following sections."
+msgid "The four main rules of persistent classes are explored in more detail in the following sections."
msgstr "在后续的章节里我们将介绍持久性类的 4 个主要规则的更多细节。"
#. Tag: title
@@ -73,19 +52,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>Cat</literal> has a no-argument constructor. All persistent classes "
-"must have a default constructor (which can be non-public) so that Hibernate "
-"can instantiate them using <literal>Constructor.newInstance()</literal>. It "
-"is recommended that you have a default constructor with at least "
-"<emphasis>package</emphasis> visibility for runtime proxy generation in "
-"Hibernate."
-msgstr ""
-"<literal>Cat</literal> 有一个无参数的构造方法。所有的持久化类都必须有一个默认"
-"的构造方法(可以不是 public 的),这样的话 Hibernate 就可以使用 "
-"<literal>Constructor.newInstance()</literal>来实例化它们。 我们强烈建议,在 "
-"Hibernate 中,为了运行期代理的生成,构造方法至少是<emphasis>包(package)</"
-"emphasis>内可见的。"
+msgid "<literal>Cat</literal> has a no-argument constructor. All persistent classes must have a default constructor (which can be non-public) so that Hibernate can instantiate them using <literal>Constructor.newInstance()</literal>. It is recommended that you have a default constructor with at least <emphasis>package</emphasis> visibility for runtime proxy generation in Hibernate."
+msgstr "<literal>Cat</literal> 有一个无参数的构造方法。所有的持久化类都必须有一个默认的构造方法(可以不是 public 的),这样的话 Hibernate 就可以使用 <literal>Constructor.newInstance()</literal>来实例化它们。 我们强烈建议,在 Hibernate 中,为了运行期代理的生成,构造方法至少是<emphasis>包(package)</emphasis>内可见的。"
#. Tag: title
#, no-c-format
@@ -94,65 +62,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>Cat</literal> has a property called <literal>id</literal>. This "
-"property maps to the primary key column of a database table. The property "
-"might have been called anything, and its type might have been any primitive "
-"type, any primitive \"wrapper\" type, <literal>java.lang.String</literal> or "
-"<literal>java.util.Date</literal>. If your legacy database table has "
-"composite keys, you can use a user-defined class with properties of these "
-"types (see the section on composite identifiers later in the chapter.)"
-msgstr ""
-"<literal>Cat</literal> 有一个属性叫做 <literal>id</literal>。这个属性映射数据"
-"库表的主 键字段。这个属性可以叫任何名字,其类型可以是任何的原始类型、原始类型"
-"的包装类型、 <literal>java.lang.String</literal> 或者是 <literal>java.util."
-"Date</literal>。(如果你的遗留数据库表有联合主键,你甚至可以用一个用户自定义"
-"的类,该类拥有这些类型的属性。参见后面的关于联合标识符的章节。)"
+msgid "<literal>Cat</literal> has a property called <literal>id</literal>. This property maps to the primary key column of a database table. The property might have been called anything, and its type might have been any primitive type, any primitive \"wrapper\" type, <literal>java.lang.String</literal> or <literal>java.util.Date</literal>. If your legacy database table has composite keys, you can use a user-defined class with properties of these types (see the section on composite identifiers later in the chapter.)"
+msgstr "<literal>Cat</literal> 有一个属性叫做 <literal>id</literal>。这个属性映射数据库表的主 键字段。这个属性可以叫任何名字,其类型可以是任何的原始类型、原始类型的包装类型、 <literal>java.lang.String</literal> 或者是 <literal>java.util.Date</literal>。(如果你的遗留数据库表有联合主键,你甚至可以用一个用户自定义的类,该类拥有这些类型的属性。参见后面的关于联合标识符的章节。)"
#. Tag: para
#, no-c-format
-msgid ""
-"The identifier property is strictly optional. You can leave them off and let "
-"Hibernate keep track of object identifiers internally. We do not recommend "
-"this, however."
-msgstr ""
-"标识符属性是可选的。可以不用管它,让 Hibernate 内部来追踪对象的识别。 但是我"
-"们并不推荐这样做。"
+msgid "The identifier property is strictly optional. You can leave them off and let Hibernate keep track of object identifiers internally. We do not recommend this, however."
+msgstr "标识符属性是可选的。可以不用管它,让 Hibernate 内部来追踪对象的识别。 但是我们并不推荐这样做。"
#. Tag: para
#, no-c-format
-msgid ""
-"In fact, some functionality is available only to classes that declare an "
-"identifier property:"
+msgid "In fact, some functionality is available only to classes that declare an identifier property:"
msgstr "实际上,一些功能只对那些声明了标识符属性的类起作用: "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Transitive reattachment for detached objects (cascade update or cascade "
-"merge) - see <xref linkend=\"objectstate-transitive\" />"
-msgstr ""
-"托管对象的传播性再连接(级联更新或级联合并) — 参阅 <xref linkend="
-"\"objectstate-transitive\"/>"
+#, no-c-format
+msgid "Transitive reattachment for detached objects (cascade update or cascade merge) - see <xref linkend=\"objectstate-transitive\" />"
+msgstr "托管对象的传播性再连接(级联更新或级联合并)- 参阅 <xref linkend=\"objectstate-transitive\"/>"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>Session.saveOrUpdate()</literal>"
-msgstr "<literal>Session.saveOrUpdate()</literal>"
+msgstr "<literal>Session.saveOrUpdate()</literal> "
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>Session.merge()</literal>"
-msgstr "<literal>Session.merge()</literal>"
+msgstr "<literal>Session.merge()</literal> "
#. Tag: para
#, no-c-format
-msgid ""
-"We recommend that you declare consistently-named identifier properties on "
-"persistent classes and that you use a nullable (i.e., non-primitive) type."
-msgstr ""
-"我们建议你对持久化类声明命名一致的标识属性。我们还建议你使用一个可以为空(也"
-"就是说,不是原始类型)的类型。 "
+msgid "We recommend that you declare consistently-named identifier properties on persistent classes and that you use a nullable (i.e., non-primitive) type."
+msgstr "我们建议你对持久化类声明命名一致的标识属性。我们还建议你使用一个可以为空(也就是说,不是原始类型)的类型。 "
#. Tag: title
#, no-c-format
@@ -161,72 +102,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A central feature of Hibernate, <emphasis>proxies</emphasis>, depends upon "
-"the persistent class being either non-final, or the implementation of an "
-"interface that declares all public methods."
-msgstr ""
-"<emphasis>代理(proxies)</emphasis>是 Hibernate 的一个重要的功能,它依赖的条"
-"件是,持久化类或者是非 final 的,或者是实现了一个所有方法都声明为 public 的接"
-"口。"
+msgid "A central feature of Hibernate, <emphasis>proxies</emphasis>, depends upon the persistent class being either non-final, or the implementation of an interface that declares all public methods."
+msgstr "<emphasis>代理(proxies)</emphasis>是 Hibernate 的一个重要的功能,它依赖的条件是,持久化类或者是非 final 的,或者是实现了一个所有方法都声明为 public 的接口。"
#. Tag: para
#, no-c-format
-msgid ""
-"You can persist <literal>final</literal> classes that do not implement an "
-"interface with Hibernate. You will not, however, be able to use proxies for "
-"lazy association fetching which will ultimately limit your options for "
-"performance tuning."
-msgstr ""
-"你可以用 Hibernate 持久化一个没有实现任何接口的 <literal>final</literal> 类,"
-"但是你不能使用代理来延迟关联加载,这会限制你进行性能优化的选择。 "
+msgid "You can persist <literal>final</literal> classes that do not implement an interface with Hibernate. You will not, however, be able to use proxies for lazy association fetching which will ultimately limit your options for performance tuning."
+msgstr "你可以用 Hibernate 持久化一个没有实现任何接口的 <literal>final</literal> 类,但是你不能使用代理来延迟关联加载,这会限制你进行性能优化的选择。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You should also avoid declaring <literal>public final</literal> methods on "
-"the non-final classes. If you want to use a class with a <literal>public "
-"final</literal> method, you must explicitly disable proxying by setting "
-"<literal>lazy=\"false\"</literal>."
-msgstr ""
-"你也应该避免在非 final 类中声明 <literal>public final</literal> 的方法。如果"
-"你想使用一个有 <literal>public final</literal> 方法的类,你必须通过设置 "
-"<literal>lazy=\"false\"</literal> 来明确地禁用代理。 "
+msgid "You should also avoid declaring <literal>public final</literal> methods on the non-final classes. If you want to use a class with a <literal>public final</literal> method, you must explicitly disable proxying by setting <literal>lazy=\"false\"</literal>."
+msgstr "你也应该避免在非 final 类中声明 <literal>public final</literal> 的方法。如果你想使用一个有 <literal>public final</literal> 方法的类,你必须通过设置 <literal>lazy=\"false\"</literal> 来明确地禁用代理。 "
#. Tag: title
#, no-c-format
msgid "Declare accessors and mutators for persistent fields (optional)"
-msgstr ""
-"为持久化字段声明访问器(accessors)和是否可变的标志(mutators)(可选)"
+msgstr "为持久化字段声明访问器(accessors)和是否可变的标志(mutators)(可选)"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>Cat</literal> declares accessor methods for all its persistent "
-"fields. Many other ORM tools directly persist instance variables. It is "
-"better to provide an indirection between the relational schema and internal "
-"data structures of the class. By default, Hibernate persists JavaBeans style "
-"properties and recognizes method names of the form <literal>getFoo</"
-"literal>, <literal>isFoo</literal> and <literal>setFoo</literal>. If "
-"required, you can switch to direct field access for particular properties."
-msgstr ""
-"<literal>Cat</literal> 为它的所有持久化字段声明了访问方法。很多其他 ORM 工具"
-"直接对实例变量进行持久化。我们相信,在关系数据库 schema 和类的内部数据结构之"
-"间引入间接层(原文为\"非直接\",indirection)会好一些。默认情况下 Hibernate "
-"持久化 JavaBeans 风格的属性,认可 <literal>getFoo</literal>,"
-"<literal>isFoo</literal> 和 <literal>setFoo</literal> 这种形式的方法名。如果"
-"需要,你可以对某些特定属性实行直接字段访问。 "
+msgid "<literal>Cat</literal> declares accessor methods for all its persistent fields. Many other ORM tools directly persist instance variables. It is better to provide an indirection between the relational schema and internal data structures of the class. By default, Hibernate persists JavaBeans style properties and recognizes method names of the form <literal>getFoo</literal>, <literal>isFoo</literal> and <literal>setFoo</literal>. If required, you can switch to direct field access for particular properties."
+msgstr "<literal>Cat</literal> 为它的所有持久化字段声明了访问方法。很多其他 ORM 工具直接对实例变量进行持久化。我们相信,在关系数据库 schema 和类的内部数据结构之间引入间接层(原文为\"非直接\",indirection)会好一些。默认情况下 Hibernate 持久化 JavaBeans 风格的属性,认可 <literal>getFoo</literal>,<literal>isFoo</literal> 和 <literal>setFoo</literal> 这种形式的方法名。如果需要,你可以对某些特定属性实行直接字段访问。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Properties need <emphasis>not</emphasis> be declared public - Hibernate can "
-"persist a property with a default, <literal>protected</literal> or "
-"<literal>private</literal> get / set pair."
-msgstr ""
-"属性<emphasis>不需要</emphasis>要声明为 public 的。Hibernate 可以持久化一个"
-"有 <literal>default</literal>、<literal>protected</literal> 或 "
-"<literal>private</literal> 的 get/set 方法对的属性进行持久化。"
+msgid "Properties need <emphasis>not</emphasis> be declared public - Hibernate can persist a property with a default, <literal>protected</literal> or <literal>private</literal> get / set pair."
+msgstr "属性<emphasis>不需要</emphasis>要声明为 public 的。Hibernate 可以持久化一个有 <literal>default</literal>、<literal>protected</literal> 或 <literal>private</literal> 的 get/set 方法对的属性进行持久化。"
#. Tag: title
#, no-c-format
@@ -235,38 +137,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A subclass must also observe the first and second rules. It inherits its "
-"identifier property from the superclass, <literal>Cat</literal>. For example:"
-msgstr ""
-"子类也必须遵守第一条和第二条规则。它从超类 <literal>Cat</literal> 继承了标识"
-"属性。例如:"
+msgid "A subclass must also observe the first and second rules. It inherits its identifier property from the superclass, <literal>Cat</literal>. For example:"
+msgstr "子类也必须遵守第一条和第二条规则。它从超类 <literal>Cat</literal> 继承了标识属性。例如:"
#. Tag: title
#, no-c-format
-msgid ""
-"Implementing <literal>equals()</literal> and <literal>hashCode()</literal>"
-msgstr ""
-"实现 <literal>equals()</literal> 和 <literal>hashCode()</literal> 方法:"
+msgid "Implementing <literal>equals()</literal> and <literal>hashCode()</literal>"
+msgstr "实现 <literal>equals()</literal> 和 <literal>hashCode()</literal> 方法:"
#. Tag: para
#, no-c-format
-msgid ""
-"You have to override the <literal>equals()</literal> and <literal>hashCode()"
-"</literal> methods if you:"
-msgstr ""
-"如果你有如下需求,你必须重载 <literal>equals()</literal> 和 <literal>hashCode"
-"()</literal> 方法: "
+msgid "You have to override the <literal>equals()</literal> and <literal>hashCode()</literal> methods if you:"
+msgstr "如果你有如下需求,你必须重载 <literal>equals()</literal> 和 <literal>hashCode()</literal> 方法: "
#. Tag: para
#, no-c-format
-msgid ""
-"intend to put instances of persistent classes in a <literal>Set</literal> "
-"(the recommended way to represent many-valued associations); <emphasis>and</"
-"emphasis>"
-msgstr ""
-"想把持久类的实例放入 <literal>Set</literal> 中(当表示多值关联时,推荐这么"
-"做),<emphasis>而且</emphasis>"
+msgid "intend to put instances of persistent classes in a <literal>Set</literal> (the recommended way to represent many-valued associations); <emphasis>and</emphasis>"
+msgstr "想把持久类的实例放入 <literal>Set</literal> 中(当表示多值关联时,推荐这么做),<emphasis>而且</emphasis>"
#. Tag: para
#, no-c-format
@@ -275,73 +162,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate guarantees equivalence of persistent identity (database row) and "
-"Java identity only inside a particular session scope. When you mix instances "
-"retrieved in different sessions, you must implement <literal>equals()</"
-"literal> and <literal>hashCode()</literal> if you wish to have meaningful "
-"semantics for <literal>Set</literal>s."
-msgstr ""
-"Hibernate 保证,仅在特定会话范围内,持久化标识(数据库的行)和 Java 标识是等"
-"价的。因此,一旦我们混合了从不同会话中获取的实例,如果希望 <literal>Set</"
-"literal> 有明确的语义,就必须实现 <literal>equals()</literal> 和 "
-"<literal>hashCode()</literal>。 "
+msgid "Hibernate guarantees equivalence of persistent identity (database row) and Java identity only inside a particular session scope. When you mix instances retrieved in different sessions, you must implement <literal>equals()</literal> and <literal>hashCode()</literal> if you wish to have meaningful semantics for <literal>Set</literal>s."
+msgstr "Hibernate 保证,仅在特定会话范围内,持久化标识(数据库的行)和 Java 标识是等价的。因此,一旦我们混合了从不同会话中获取的实例,如果希望 <literal>Set</literal> 有明确的语义,就必须实现 <literal>equals()</literal> 和 <literal>hashCode()</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The most obvious way is to implement <literal>equals()</literal>/"
-"<literal>hashCode()</literal> by comparing the identifier value of both "
-"objects. If the value is the same, both must be the same database row, "
-"because they are equal. If both are added to a <literal>Set</literal>, you "
-"will only have one element in the <literal>Set</literal>). Unfortunately, "
-"you cannot use that approach with generated identifiers. Hibernate will only "
-"assign identifier values to objects that are persistent; a newly created "
-"instance will not have any identifier value. Furthermore, if an instance is "
-"unsaved and currently in a <literal>Set</literal>, saving it will assign an "
-"identifier value to the object. If <literal>equals()</literal> and "
-"<literal>hashCode()</literal> are based on the identifier value, the hash "
-"code would change, breaking the contract of the <literal>Set</literal>. See "
-"the Hibernate website for a full discussion of this problem. This is not a "
-"Hibernate issue, but normal Java semantics of object identity and equality."
-msgstr ""
-"实现 <literal>equals()</literal>/<literal>hashCode()</literal> 最显而易见的方"
-"法是比较两个对象 标识符的值。如果值相同,则两个对象对应于数据库的同一行,因此"
-"它们是相等的(如果都被添加到 <literal>Set</literal>,则在 <literal>Set</"
-"literal> 中只有一个元素)。不幸的是,对生成的标识不能 使用这种方法。"
-"Hibernate 仅对那些持久化对象赋标识值,一个新创建的实例将不会有任何标识值。此"
-"外, 如果一个实例没有被保存(unsaved),并且它当前正在一个 <literal>Set</"
-"literal> 中,保存它将会给这个对象赋一个标识值。如果 <literal>equals()</"
-"literal> 和 <literal>hashCode()</literal> 是基于标识值 实现的,则其哈希码将会"
-"改变,这违反了 <literal>Set</literal> 的契约。建议去 Hibernate 的站点阅读关于"
-"这个问题的全部讨论。注意,这不是 Hibernate 的问题,而是一般的 Java 对象标识"
-"和 Java 对象等价的语义问题。 "
+msgid "The most obvious way is to implement <literal>equals()</literal>/<literal>hashCode()</literal> by comparing the identifier value of both objects. If the value is the same, both must be the same database row, because they are equal. If both are added to a <literal>Set</literal>, you will only have one element in the <literal>Set</literal>). Unfortunately, you cannot use that approach with generated identifiers. Hibernate will only assign identifier values to objects that are persistent; a newly created instance will not have any identifier value. Furthermore, if an instance is unsaved and currently in a <literal>Set</literal>, saving it will assign an identifier value to the object. If <literal>equals()</literal> and <literal>hashCode()</literal> are based on the identifier value, the hash code would change, breaking the contract of the <literal>Set</literal>. See the Hibernate website for a full discussion of this problem. This is not a Hibernate issue, but normal Ja!
va semantics of object identity and equality."
+msgstr "实现 <literal>equals()</literal>/<literal>hashCode()</literal> 最显而易见的方法是比较两个对象 标识符的值。如果值相同,则两个对象对应于数据库的同一行,因此它们是相等的(如果都被添加到 <literal>Set</literal>,则在 <literal>Set</literal> 中只有一个元素)。不幸的是,对生成的标识不能 使用这种方法。Hibernate 仅对那些持久化对象赋标识值,一个新创建的实例将不会有任何标识值。此外, 如果一个实例没有被保存(unsaved),并且它当前正在一个 <literal>Set</literal> 中,保存它将会给这个对象赋一个标识值。如果 <literal>equals()</literal> 和 <literal>hashCode()</literal> 是基于标识值 实现的,则其哈希码将会改变,这违反了 <literal>Set</literal> 的契约。建议去 Hibernate 的站点阅读关于这个问题的全部讨论。注意,这不是 Hibernate 的问题,而是一般的 Java 对象标!
识和 Java 对象等价的语义问题。 "
#. Tag: para
#, no-c-format
-msgid ""
-"It is recommended that you implement <literal>equals()</literal> and "
-"<literal>hashCode()</literal> using <emphasis>Business key equality</"
-"emphasis>. Business key equality means that the <literal>equals()</literal> "
-"method compares only the properties that form the business key. It is a key "
-"that would identify our instance in the real world (a <emphasis>natural</"
-"emphasis> candidate key):"
-msgstr ""
-"我们建议使用<emphasis>业务键值相等(Business key equality)</emphasis>来实现 "
-"<literal>equals()</literal> 和 <literal>hashCode()</literal>。业务键值相等的"
-"意思是,<literal>equals()</literal> 方法仅仅比较形成业务键的属性,它能在现实"
-"世界里标识我们的实例(是一个<emphasis>自然的</emphasis>候选码)。"
+msgid "It is recommended that you implement <literal>equals()</literal> and <literal>hashCode()</literal> using <emphasis>Business key equality</emphasis>. Business key equality means that the <literal>equals()</literal> method compares only the properties that form the business key. It is a key that would identify our instance in the real world (a <emphasis>natural</emphasis> candidate key):"
+msgstr "我们建议使用<emphasis>业务键值相等(Business key equality)</emphasis>来实现 <literal>equals()</literal> 和 <literal>hashCode()</literal>。业务键值相等的意思是,<literal>equals()</literal> 方法仅仅比较形成业务键的属性,它能在现实世界里标识我们的实例(是一个<emphasis>自然的</emphasis>候选码)。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"A business key does not have to be as solid as a database primary key "
-"candidate (see <xref linkend=\"transactions-basics-identity\" />). Immutable "
-"or unique properties are usually good candidates for a business key."
-msgstr ""
-"注意,业务键不必像数据库的主键那样固定不变(参见 <xref linkend="
-"\"transactions-basics-identity\"/>)。对业务键而言,不可变或唯一的属性是不错"
-"的选择。 "
+#, no-c-format
+msgid "A business key does not have to be as solid as a database primary key candidate (see <xref linkend=\"transactions-basics-identity\" />). Immutable or unique properties are usually good candidates for a business key."
+msgstr "注意,业务键不必像数据库的主键那样固定不变(参见 <xref linkend=\"transactions-basics-identity\"/>)。对业务键而言,不可变或唯一的属性是不错的选择。"
#. Tag: title
#, no-c-format
@@ -354,117 +191,54 @@
msgstr "注意"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"<emphasis>The following features are currently considered experimental and "
-"may change in the near future.</emphasis>"
-msgstr ""
-"<emphasis>注意,以下特性在当前处于试验阶段,将来可能会有变化。</emphasis>"
+#, no-c-format
+msgid "<emphasis>The following features are currently considered experimental and may change in the near future.</emphasis>"
+msgstr "<emphasis>注意,以下特性在当前处于试验阶段,将来可能会有变化。</emphasis> "
#. Tag: para
#, no-c-format
-msgid ""
-"Persistent entities do not necessarily have to be represented as POJO "
-"classes or as JavaBean objects at runtime. Hibernate also supports dynamic "
-"models (using <literal>Map</literal>s of <literal>Map</literal>s at runtime) "
-"and the representation of entities as DOM4J trees. With this approach, you "
-"do not write persistent classes, only mapping files."
-msgstr ""
-"运行期的持久化实体没有必要一定表示为像 POJO 类或 JavaBean 对象那样的形式。"
-"Hibernate 也支持动态模型 (在运行期使用 <literal>Map</literal> 的 "
-"<literal>Map</literal>)和象 DOM4J 的树模型那样的实体表示。使用这种方法,你不"
-"用写持久化类,只写映射文件就行了。 "
+msgid "Persistent entities do not necessarily have to be represented as POJO classes or as JavaBean objects at runtime. Hibernate also supports dynamic models (using <literal>Map</literal>s of <literal>Map</literal>s at runtime) and the representation of entities as DOM4J trees. With this approach, you do not write persistent classes, only mapping files."
+msgstr "运行期的持久化实体没有必要一定表示为像 POJO 类或 JavaBean 对象那样的形式。Hibernate 也支持动态模型 (在运行期使用 <literal>Map</literal> 的 <literal>Map</literal>)和象 DOM4J 的树模型那样的实体表示。使用这种方法,你不用写持久化类,只写映射文件就行了。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"By default, Hibernate works in normal POJO mode. You can set a default "
-"entity representation mode for a particular <literal>SessionFactory</"
-"literal> using the <literal>default_entity_mode</literal> configuration "
-"option (see <xref linkend=\"configuration-optional-properties\" />)."
-msgstr ""
-"Hibernate 默认工作在普通 POJO 模式。你可以使用配置选项 "
-"<literal>default_entity_mode</literal>, 对特定的 <literal>SessionFactory</"
-"literal>,设置一个默认的实体表示模式。(参见 <xref linkend=\"configuration-"
-"optional-properties\"/>) "
+#, no-c-format
+msgid "By default, Hibernate works in normal POJO mode. You can set a default entity representation mode for a particular <literal>SessionFactory</literal> using the <literal>default_entity_mode</literal> configuration option (see <xref linkend=\"configuration-optional-properties\" />)."
+msgstr "Hibernate 默认工作在普通 POJO 模式。你可以使用配置选项 <literal>default_entity_mode</literal>, 对特定的 <literal>SessionFactory</literal>,设置一个默认的实体表示模式。(参见 <xref linkend=\"configuration-optional-properties\"/>)。"
#. Tag: para
#, no-c-format
-msgid ""
-"The following examples demonstrate the representation using <literal>Map</"
-"literal>s. First, in the mapping file an <literal>entity-name</literal> has "
-"to be declared instead of, or in addition to, a class name:"
-msgstr ""
-"下面是用 <literal>Map</literal> 来表示的例子。首先,在映射文件中,要声明 "
-"<literal>entity-name</literal> 来代替一个类名(或作为一种附属)。 "
+msgid "The following examples demonstrate the representation using <literal>Map</literal>s. First, in the mapping file an <literal>entity-name</literal> has to be declared instead of, or in addition to, a class name:"
+msgstr "下面是用 <literal>Map</literal> 来表示的例子。首先,在映射文件中,要声明 <literal>entity-name</literal> 来代替一个类名(或作为一种附属)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Even though associations are declared using target class names, the target "
-"type of associations can also be a dynamic entity instead of a POJO."
-msgstr ""
-"注意,虽然是用目标类名来声明关联的,但是关联的目标类型除了是 POJO 之外,也可"
-"以是一个动态的实体。 "
+msgid "Even though associations are declared using target class names, the target type of associations can also be a dynamic entity instead of a POJO."
+msgstr "注意,虽然是用目标类名来声明关联的,但是关联的目标类型除了是 POJO 之外,也可以是一个动态的实体。 "
#. Tag: para
#, no-c-format
-msgid ""
-"After setting the default entity mode to <literal>dynamic-map</literal> for "
-"the <literal>SessionFactory</literal>, you can, at runtime, work with "
-"<literal>Map</literal>s of <literal>Map</literal>s:"
-msgstr ""
-"在使用 <literal>dynamic-map</literal> 为 <literal>SessionFactory</literal> 设"
-"置了默认的实体模式之后,可以在运行期使用 <literal>Map</literal> 的 "
-"<literal>Map</literal>:"
+msgid "After setting the default entity mode to <literal>dynamic-map</literal> for the <literal>SessionFactory</literal>, you can, at runtime, work with <literal>Map</literal>s of <literal>Map</literal>s:"
+msgstr "在使用 <literal>dynamic-map</literal> 为 <literal>SessionFactory</literal> 设置了默认的实体模式之后,可以在运行期使用 <literal>Map</literal> 的 <literal>Map</literal>:"
#. Tag: para
#, no-c-format
-msgid ""
-"One of the main advantages of dynamic mapping is quick turnaround time for "
-"prototyping, without the need for entity class implementation. However, you "
-"lose compile-time type checking and will likely deal with many exceptions at "
-"runtime. As a result of the Hibernate mapping, the database schema can "
-"easily be normalized and sound, allowing to add a proper domain model "
-"implementation on top later on."
-msgstr ""
-"动态映射的好处是,变化所需要的时间少了,因为原型不需要实现实体类。然而,你无"
-"法进行编译期的类型检查,并可能由此会处理很多的运行期异常。幸亏有了 Hibernate "
-"映射,它使得数据库的 schema 能容易的规格化和合理化,并允许稍后在此之上添加合"
-"适的领域模型实现。 "
+msgid "One of the main advantages of dynamic mapping is quick turnaround time for prototyping, without the need for entity class implementation. However, you lose compile-time type checking and will likely deal with many exceptions at runtime. As a result of the Hibernate mapping, the database schema can easily be normalized and sound, allowing to add a proper domain model implementation on top later on."
+msgstr "动态映射的好处是,变化所需要的时间少了,因为原型不需要实现实体类。然而,你无法进行编译期的类型检查,并可能由此会处理很多的运行期异常。幸亏有了 Hibernate 映射,它使得数据库的 schema 能容易的规格化和合理化,并允许稍后在此之上添加合适的领域模型实现。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Entity representation modes can also be set on a per <literal>Session</"
-"literal> basis:"
+msgid "Entity representation modes can also be set on a per <literal>Session</literal> basis:"
msgstr "实体表示模式也能在每个 <literal>Session</literal> 的基础上设置:"
#. Tag: para
#, no-c-format
-msgid ""
-"Please note that the call to <literal>getSession()</literal> using an "
-"<literal>EntityMode</literal> is on the <literal>Session</literal> API, not "
-"the <literal>SessionFactory</literal>. That way, the new <literal>Session</"
-"literal> shares the underlying JDBC connection, transaction, and other "
-"context information. This means you do not have to call <literal>flush()</"
-"literal> and <literal>close()</literal> on the secondary <literal>Session</"
-"literal>, and also leave the transaction and connection handling to the "
-"primary unit of work."
-msgstr ""
-"请注意,用 <literal>EntityMode</literal> 调用 <literal>getSession()</"
-"literal> 是在 <literal>Session</literal> 的 API 中,而不是 "
-"<literal>SessionFactory</literal>。 这样,新的 <literal>Session</literal> 共"
-"享底层的 JDBC 连接,事务,和其他的上下文信息。这意味着,你不需要在第二个 "
-"<literal>Session</literal> 中调用 <literal>flush()</literal> 和 "
-"<literal>close()</literal>,同样的,把事务和连接的处理交给原来的工作单元。 "
+msgid "Please note that the call to <literal>getSession()</literal> using an <literal>EntityMode</literal> is on the <literal>Session</literal> API, not the <literal>SessionFactory</literal>. That way, the new <literal>Session</literal> shares the underlying JDBC connection, transaction, and other context information. This means you do not have to call <literal>flush()</literal> and <literal>close()</literal> on the secondary <literal>Session</literal>, and also leave the transaction and connection handling to the primary unit of work."
+msgstr "请注意,用 <literal>EntityMode</literal> 调用 <literal>getSession()</literal> 是在 <literal>Session</literal> 的 API 中,而不是 <literal>SessionFactory</literal>。 这样,新的 <literal>Session</literal> 共享底层的 JDBC 连接,事务,和其他的上下文信息。这意味着,你不需要在第二个 <literal>Session</literal> 中调用 <literal>flush()</literal> 和 <literal>close()</literal>,同样的,把事务和连接的处理交给原来的工作单元。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"More information about the XML representation capabilities can be found in "
-"<xref linkend=\"xml\" />."
-msgstr "关于 XML 表示能力的更多信息可以在 <xref linkend=\"xml\"/> 中找到。 "
+#, no-c-format
+msgid "More information about the XML representation capabilities can be found in <xref linkend=\"xml\" />."
+msgstr "关于 XML 表示能力的更多信息可以在 <xref linkend=\"xml\"/> 中找到。"
#. Tag: title
#, no-c-format
@@ -473,61 +247,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>org.hibernate.tuple.Tuplizer</literal>, and its sub-interfaces, are "
-"responsible for managing a particular representation of a piece of data "
-"given that representation's <literal>org.hibernate.EntityMode</literal>. If "
-"a given piece of data is thought of as a data structure, then a tuplizer is "
-"the thing that knows how to create such a data structure and how to extract "
-"values from and inject values into such a data structure. For example, for "
-"the POJO entity mode, the corresponding tuplizer knows how create the POJO "
-"through its constructor. It also knows how to access the POJO properties "
-"using the defined property accessors."
-msgstr ""
-"<literal>org.hibernate.tuple.Tuplizer</literal>,以及其子接口,负责根据给定的"
-"<literal>org.hibernate.EntityMode</literal>,来复现片断数据。如果给定的片断数"
-"据被认为其是一种数据结构,\"tuplizer\" 就是一个知道如何创建这样的数据结构,以"
-"及如何给这个数据结构赋值的东西。比如说,对于 POJO 这种 Entity Mode,对应的 "
-"tuplizer 知道通过其构造方法来创建一个 POJO,再通过其属性访问器来访问 POJO 属"
-"性。有两大类高层 Tuplizer,分别是<literal>org.hibernate.tuple.entity."
-"EntityTuplizer</literal> 和 <literal>org.hibernate.tuple.entity."
-"ComponentTuplizer</literal> 接口。<literal>EntityTuplizer</literal> 负责管理"
-"上面提到的实体的契约,而 <literal>ComponentTuplizer</literal> 则是针对组件"
-"的。 "
+msgid "<literal>org.hibernate.tuple.Tuplizer</literal>, and its sub-interfaces, are responsible for managing a particular representation of a piece of data given that representation's <literal>org.hibernate.EntityMode</literal>. If a given piece of data is thought of as a data structure, then a tuplizer is the thing that knows how to create such a data structure and how to extract values from and inject values into such a data structure. For example, for the POJO entity mode, the corresponding tuplizer knows how create the POJO through its constructor. It also knows how to access the POJO properties using the defined property accessors."
+msgstr "<literal>org.hibernate.tuple.Tuplizer</literal>,以及其子接口,负责根据给定的<literal>org.hibernate.EntityMode</literal>,来复现片断数据。如果给定的片断数据被认为其是一种数据结构,\"tuplizer\" 就是一个知道如何创建这样的数据结构,以及如何给这个数据结构赋值的东西。比如说,对于 POJO 这种 Entity Mode,对应的 tuplizer 知道通过其构造方法来创建一个 POJO,再通过其属性访问器来访问 POJO 属性。有两大类高层 Tuplizer,分别是<literal>org.hibernate.tuple.entity.EntityTuplizer</literal> 和 <literal>org.hibernate.tuple.entity.ComponentTuplizer</literal> 接口。<literal>EntityTuplizer</literal> 负责管理上面提到的实体的契约,而 <literal>ComponentTuplizer</literal> 则是针对组件的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"There are two high-level types of Tuplizers, represented by the <literal>org."
-"hibernate.tuple.entity.EntityTuplizer</literal> and <literal>org.hibernate."
-"tuple.component.ComponentTuplizer</literal> interfaces. "
-"<literal>EntityTuplizer</literal>s are responsible for managing the above "
-"mentioned contracts in regards to entities, while "
-"<literal>ComponentTuplizer</literal>s do the same for components."
-msgstr ""
-"有两种高层类型的 Tuplizer,分别由 <literal>org.hibernate.tuple.entity."
-"EntityTuplizer</literal> 和 <literal>org.hibernate.tuple.component."
-"ComponentTuplizer</literal> 接口代表。<literal>EntityTuplizer</literal> 负责"
-"管理和实体相关的上述合约,而<literal>ComponentTuplizer</literal> 则负责组件。"
+msgid "There are two high-level types of Tuplizers, represented by the <literal>org.hibernate.tuple.entity.EntityTuplizer</literal> and <literal>org.hibernate.tuple.component.ComponentTuplizer</literal> interfaces. <literal>EntityTuplizer</literal>s are responsible for managing the above mentioned contracts in regards to entities, while <literal>ComponentTuplizer</literal>s do the same for components."
+msgstr "有两种高层类型的 Tuplizer,分别由 <literal>org.hibernate.tuple.entity.EntityTuplizer</literal> 和 <literal>org.hibernate.tuple.component.ComponentTuplizer</literal> 接口代表。<literal>EntityTuplizer</literal> 负责管理和实体相关的上述合约,而<literal>ComponentTuplizer</literal> 则负责组件。"
#. Tag: para
#, no-c-format
-msgid ""
-"Users can also plug in their own tuplizers. Perhaps you require that a "
-"<literal>java.util.Map</literal> implementation other than <literal>java."
-"util.HashMap</literal> be used while in the dynamic-map entity-mode. Or "
-"perhaps you need to define a different proxy generation strategy than the "
-"one used by default. Both would be achieved by defining a custom tuplizer "
-"implementation. Tuplizer definitions are attached to the entity or component "
-"mapping they are meant to manage. Going back to the example of our customer "
-"entity:"
-msgstr ""
-"用户也可以插入其自定义的 tuplizer。或许您需要一种不同于 dynamic-map entity-"
-"mode 中使用的 <literal>java.util.HashMap</literal> 的 <literal>java.util."
-"Map</literal> 实现;或许您需要与默认策略不同的代理生成策略(proxy generation "
-"strategy)。通过自定义 tuplizer 实现,这两个目标您都可以达到。Tuplizer 定义被"
-"附加到它们期望管理的 entity 或者 component 映射中。回到我们的 customer "
-"entity 例子: "
+msgid "Users can also plug in their own tuplizers. Perhaps you require that a <literal>java.util.Map</literal> implementation other than <literal>java.util.HashMap</literal> be used while in the dynamic-map entity-mode. Or perhaps you need to define a different proxy generation strategy than the one used by default. Both would be achieved by defining a custom tuplizer implementation. Tuplizer definitions are attached to the entity or component mapping they are meant to manage. Going back to the example of our customer entity:"
+msgstr "用户也可以插入其自定义的 tuplizer。或许您需要一种不同于 dynamic-map entity-mode 中使用的 <literal>java.util.HashMap</literal> 的 <literal>java.util.Map</literal> 实现;或许您需要与默认策略不同的代理生成策略(proxy generation strategy)。通过自定义 tuplizer 实现,这两个目标您都可以达到。Tuplizer 定义被附加到它们期望管理的 entity 或者 component 映射中。回到我们的 customer entity 例子: "
#. Tag: title
#, no-c-format
@@ -536,60 +267,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <interfacename>org.hibernate.EntityNameResolver</interfacename> "
-"interface is a contract for resolving the entity name of a given entity "
-"instance. The interface defines a single method "
-"<methodname>resolveEntityName</methodname> which is passed the entity "
-"instance and is expected to return the appropriate entity name (null is "
-"allowed and would indicate that the resolver does not know how to resolve "
-"the entity name of the given entity instance). Generally speaking, an "
-"<interfacename>org.hibernate.EntityNameResolver</interfacename> is going to "
-"be most useful in the case of dynamic models. One example might be using "
-"proxied interfaces as your domain model. The hibernate test suite has an "
-"example of this exact style of usage under the <package>org.hibernate.test."
-"dynamicentity.tuplizer2</package>. Here is some of the code from that "
-"package for illustration."
-msgstr ""
-"<interfacename>org.hibernate.EntityNameResolver</interfacename> 接口是一个解"
-"析给定实体实例的实体名称的合约。这个接口定义了一个单一的方法 "
-"<methodname>resolveEntityName</methodname>,它传递实体实例并预期返回合适的实"
-"体名称(null 指明解析器不知道如何解析给定实体实例的实体名称)。一般说来,"
-"<interfacename>org.hibernate.EntityNameResolver</interfacename> 在动态模型里"
-"最为有用。其中的例子是把代理接口用作你的域模型。Hibernate Test Suite 在 "
-"<package>org.hibernate.test.dynamicentity.tuplizer2</package> 下有具有完全相"
-"同风格的例子。下面是该包里的一些代码:"
+msgid "The <interfacename>org.hibernate.EntityNameResolver</interfacename> interface is a contract for resolving the entity name of a given entity instance. The interface defines a single method <methodname>resolveEntityName</methodname> which is passed the entity instance and is expected to return the appropriate entity name (null is allowed and would indicate that the resolver does not know how to resolve the entity name of the given entity instance). Generally speaking, an <interfacename>org.hibernate.EntityNameResolver</interfacename> is going to be most useful in the case of dynamic models. One example might be using proxied interfaces as your domain model. The hibernate test suite has an example of this exact style of usage under the <package>org.hibernate.test.dynamicentity.tuplizer2</package>. Here is some of the code from that package for illustration."
+msgstr "<interfacename>org.hibernate.EntityNameResolver</interfacename> 接口是一个解析给定实体实例的实体名称的合约。这个接口定义了一个单一的方法 <methodname>resolveEntityName</methodname>,它传递实体实例并预期返回合适的实体名称(null 指明解析器不知道如何解析给定实体实例的实体名称)。一般说来,<interfacename>org.hibernate.EntityNameResolver</interfacename> 在动态模型里最为有用。其中的例子是把代理接口用作你的域模型。Hibernate Test Suite 在 <package>org.hibernate.test.dynamicentity.tuplizer2</package> 下有具有完全相同风格的例子。下面是该包里的一些代码:"
#. Tag: para
#, no-c-format
-msgid ""
-"In order to register an <interfacename>org.hibernate.EntityNameResolver</"
-"interfacename> users must either:"
-msgstr ""
-"为了注册 <interfacename>org.hibernate.EntityNameResolver</interfacename>,用"
-"户必须:"
+msgid "In order to register an <interfacename>org.hibernate.EntityNameResolver</interfacename> users must either:"
+msgstr "为了注册 <interfacename>org.hibernate.EntityNameResolver</interfacename>,用户必须:"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Implement a custom <link linkend=\"persistent-classes-tuplizers\">Tuplizer</"
-"link>, implementing the <methodname>getEntityNameResolvers</methodname> "
-"method."
-msgstr ""
-"实现自定义的 <xref linkend=\"persistent-classes-tuplizers\"/> 并实现 "
-"<methodname>getEntityNameResolvers</methodname> 方法。 "
+#, no-c-format
+msgid "Implement a custom <link linkend=\"persistent-classes-tuplizers\">Tuplizer</link>, implementing the <methodname>getEntityNameResolvers</methodname> method."
+msgstr "实现自定义的 <link linkend=\"persistent-classes-tuplizers\">Tuplizer</link> 并实现 <methodname>getEntityNameResolvers</methodname> 方法。"
#. Tag: para
#, no-c-format
-msgid ""
-"Register it with the <classname>org.hibernate.impl.SessionFactoryImpl</"
-"classname> (which is the implementation class for <interfacename>org."
-"hibernate.SessionFactory</interfacename>) using the "
-"<methodname>registerEntityNameResolver</methodname> method."
-msgstr ""
-"用 <methodname>registerEntityNameResolver</methodname> 方法注册到 "
-"<classname>org.hibernate.impl.SessionFactoryImpl</classname>(它是 "
-"<interfacename>org.hibernate.SessionFactory</interfacename> 的实现类)。"
+msgid "Register it with the <classname>org.hibernate.impl.SessionFactoryImpl</classname> (which is the implementation class for <interfacename>org.hibernate.SessionFactory</interfacename>) using the <methodname>registerEntityNameResolver</methodname> method."
+msgstr "用 <methodname>registerEntityNameResolver</methodname> 方法注册到 <classname>org.hibernate.impl.SessionFactoryImpl</classname>(它是 <interfacename>org.hibernate.SessionFactory</interfacename> 的实现类)。"
#~ msgid ""
#~ "<![CDATA[package eg;\n"
@@ -749,7 +443,6 @@
#~ " kittens.add(kitten);\n"
#~ " }\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[package eg;\n"
#~ "\n"
@@ -776,7 +469,6 @@
#~ " this.name=name;\n"
#~ " }\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[public class Cat {\n"
#~ "\n"
@@ -825,7 +517,6 @@
#~ " }\n"
#~ "\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[<hibernate-mapping>\n"
#~ "\n"
@@ -894,7 +585,6 @@
#~ " </class>\n"
#~ " \n"
#~ "</hibernate-mapping>]]>"
-
#~ msgid ""
#~ "<![CDATA[Session s = openSession();\n"
#~ "Transaction tx = s.beginTransaction();\n"
@@ -939,7 +629,6 @@
#~ "\n"
#~ "tx.commit();\n"
#~ "s.close();]]>"
-
#~ msgid ""
#~ "<![CDATA[Session dynamicSession = pojoSession.getSession(EntityMode."
#~ "MAP);\n"
@@ -968,7 +657,6 @@
#~ "...\n"
#~ "// Continue on pojoSession\n"
#~ "]]>"
-
#~ msgid ""
#~ "<![CDATA[<hibernate-mapping>\n"
#~ " <class entity-name=\"Customer\">\n"
@@ -1043,7 +731,6 @@
#~ " }\n"
#~ " }\n"
#~ "}]]>"
-
#~ msgid ""
#~ "/**\n"
#~ " * A very trivial JDK Proxy InvocationHandler implementation where we "
@@ -1302,3 +989,4 @@
#~ "\n"
#~ " ...\n"
#~ "}"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/portability.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/portability.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/portability.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-03-12T00:03:47\n"
-"PO-Revision-Date: 2010-01-11 10:57+1000\n"
+"PO-Revision-Date: 2010-03-16 10:10+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -26,21 +26,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"One of the selling points of Hibernate (and really Object/Relational Mapping "
-"as a whole) is the notion of database portability. This could mean an "
-"internal IT user migrating from one database vendor to another, or it could "
-"mean a framework or deployable application consuming Hibernate to "
-"simultaneously target multiple database products by their users. Regardless "
-"of the exact scenario, the basic idea is that you want Hibernate to help you "
-"run against any number of databases without changes to your code, and "
-"ideally without any changes to the mapping metadata."
-msgstr ""
-"Hibernate(实际上是整个 Object/Relational Mapping)的一个卖点是数据库的移植"
-"性。这意味着内部的 IT 用户可以改变数据库供应商,或者可部署的应用程序/框架使"
-"用 Hibernate 来同时使用多个数据库产品。不考虑具体的应用情景,这里的基本概念"
-"是 Hibernate 可帮助你运行多种数据库而无需修改你的代码,理想情况下甚至不用修改"
-"映射元数据。"
+msgid "One of the selling points of Hibernate (and really Object/Relational Mapping as a whole) is the notion of database portability. This could mean an internal IT user migrating from one database vendor to another, or it could mean a framework or deployable application consuming Hibernate to simultaneously target multiple database products by their users. Regardless of the exact scenario, the basic idea is that you want Hibernate to help you run against any number of databases without changes to your code, and ideally without any changes to the mapping metadata."
+msgstr "Hibernate(实际上是整个 Object/Relational Mapping)的一个卖点是数据库的移植性。这意味着内部的 IT 用户可以改变数据库供应商,或者可部署的应用程序/框架使用 Hibernate 来同时使用多个数据库产品。不考虑具体的应用情景,这里的基本概念是 Hibernate 可帮助你运行多种数据库而无需修改你的代码,理想情况下甚至不用修改映射元数据。"
#. Tag: title
#, no-c-format
@@ -49,21 +36,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The first line of portability for Hibernate is the dialect, which is a "
-"specialization of the <classname>org.hibernate.dialect.Dialect</classname> "
-"contract. A dialect encapsulates all the differences in how Hibernate must "
-"communicate with a particular database to accomplish some task like getting "
-"a sequence value or structuring a SELECT query. Hibernate bundles a wide "
-"range of dialects for many of the most popular databases. If you find that "
-"your particular database is not among them, it is not terribly difficult to "
-"write your own."
-msgstr ""
-"Hibernate 的移植性的首要问题是方言(dialect),也就是 <classname>org."
-"hibernate.dialect.Dialect</classname> 合约的具体实例。方言封装了 Hibernate 和"
-"特定数据库通讯以完成某些任务如获取序列值或构建 SELECT 查询等的所有差异。"
-"Hibernate 捆绑了用于许多最常用的数据库的方言。如果你发现自己使用的数据库不在"
-"其中,编写自定义的方言也不是很困难的事情。"
+msgid "The first line of portability for Hibernate is the dialect, which is a specialization of the <classname>org.hibernate.dialect.Dialect</classname> contract. A dialect encapsulates all the differences in how Hibernate must communicate with a particular database to accomplish some task like getting a sequence value or structuring a SELECT query. Hibernate bundles a wide range of dialects for many of the most popular databases. If you find that your particular database is not among them, it is not terribly difficult to write your own."
+msgstr "Hibernate 的移植性的首要问题是方言(dialect),也就是 <classname>org.hibernate.dialect.Dialect</classname> 合约的具体实例。方言封装了 Hibernate 和特定数据库通讯以完成某些任务如获取序列值或构建 SELECT 查询等的所有差异。Hibernate 捆绑了用于许多最常用的数据库的方言。如果你发现自己使用的数据库不在其中,编写自定义的方言也不是很困难的事情。"
#. Tag: title
#, no-c-format
@@ -72,82 +46,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Originally, Hibernate would always require that users specify which dialect "
-"to use. In the case of users looking to simultaneously target multiple "
-"databases with their build that was problematic. Generally this required "
-"their users to configure the Hibernate dialect or defining their own method "
-"of setting that value."
-msgstr ""
-"最开始,Hibernate 总是要求用户指定所使用的方言(dialect)。在用户希望同时使用"
-"多个数据库时就会出现问题。通常这要求用户配置 Hibernate 方言或者定义自己设置这"
-"个值的方法。"
+msgid "Originally, Hibernate would always require that users specify which dialect to use. In the case of users looking to simultaneously target multiple databases with their build that was problematic. Generally this required their users to configure the Hibernate dialect or defining their own method of setting that value."
+msgstr "最开始,Hibernate 总是要求用户指定所使用的方言(dialect)。在用户希望同时使用多个数据库时就会出现问题。通常这要求用户配置 Hibernate 方言或者定义自己设置这个值的方法。"
#. Tag: para
#, no-c-format
-msgid ""
-"Starting with version 3.2, Hibernate introduced the notion of automatically "
-"detecting the dialect to use based on the <interfacename>java.sql."
-"DatabaseMetaData</interfacename> obtained from a <interfacename>java.sql."
-"Connection</interfacename> to that database. This was much better, expect "
-"that this resolution was limited to databases Hibernate know about ahead of "
-"time and was in no way configurable or overrideable."
-msgstr ""
-"从版本 3.2 开始,Hibernate 引入了方言的自动检测,它基于从该数据库的 "
-"<interfacename>java.sql.Connection</interfacename> 上获得的 "
-"<interfacename>java.sql.DatabaseMetaData</interfacename>。这是一个更好的方"
-"案,但它局限于 Hibernate 已知的数据库且无法进行配置和覆盖。"
+msgid "Starting with version 3.2, Hibernate introduced the notion of automatically detecting the dialect to use based on the <interfacename>java.sql.DatabaseMetaData</interfacename> obtained from a <interfacename>java.sql.Connection</interfacename> to that database. This was much better, expect that this resolution was limited to databases Hibernate know about ahead of time and was in no way configurable or overrideable."
+msgstr "从版本 3.2 开始,Hibernate 引入了方言的自动检测,它基于从该数据库的 <interfacename>java.sql.Connection</interfacename> 上获得的 <interfacename>java.sql.DatabaseMetaData</interfacename>。这是一个更好的方案,但它局限于 Hibernate 已知的数据库且无法进行配置和覆盖。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Starting with version 3.3, Hibernate has a fare more powerful way to "
-"automatically determine which dialect to should be used by relying on a "
-"series of delegates which implement the <interfacename>org.hibernate.dialect."
-"resolver.DialectResolver</interfacename> which defines only a single method:"
-"<programlisting role=\"JAVA\">public Dialect resolveDialect(DatabaseMetaData "
-"metaData) throws JDBCConnectionException</programlisting>. The basic "
-"contract here is that if the resolver 'understands' the given database "
-"metadata then it returns the corresponding Dialect; if not it returns null "
-"and the process continues to the next resolver. The signature also "
-"identifies <exceptionname>org.hibernate.exception.JDBCConnectionException</"
-"exceptionname> as possibly being thrown. A JDBCConnectionException here is "
-"interpreted to imply a \"non transient\" (aka non-recoverable) connection "
-"problem and is used to indicate an immediate stop to resolution attempts. "
-"All other exceptions result in a warning and continuing on to the next "
-"resolver."
-msgstr ""
-"从版本 3.3 开始,Hibernate 有了更为强大的自动决定应该使用哪个方言的方法,这根"
-"据一系列实现 <interfacename>org.hibernate.dialect.resolver.DialectResolver</"
-"interfacename> 接口的代理,它们只定义一个方法:<programlisting>public "
-"Dialect resolveDialect(DatabaseMetaData metaData) throws "
-"JDBCConnectionException</programlisting>。这里的基本合约是如果解析者"
-"(resolver)“理解”给点数据库的元数据并返回对应的方言;否则返回 null 并使用下"
-"一个解析者。这个签名也指定可能抛出的异常 <exceptionname>org.hibernate."
-"exception.JDBCConnectionException</exceptionname>。这里的 "
-"JDBCConnectionException 被认为是“非瞬时的”(也就是不可恢复的)连接问题且指示"
-"立即终止解析。所有其他的异常都导致警告发出并使用下一个解析者。\""
+#, no-c-format
+msgid "Starting with version 3.3, Hibernate has a fare more powerful way to automatically determine which dialect to should be used by relying on a series of delegates which implement the <interfacename>org.hibernate.dialect.resolver.DialectResolver</interfacename> which defines only a single method:<programlisting role=\"JAVA\">public Dialect resolveDialect(DatabaseMetaData metaData) throws JDBCConnectionException</programlisting>. The basic contract here is that if the resolver 'understands' the given database metadata then it returns the corresponding Dialect; if not it returns null and the process continues to the next resolver. The signature also identifies <exceptionname>org.hibernate.exception.JDBCConnectionException</exceptionname> as possibly being thrown. A JDBCConnectionException here is interpreted to imply a \"non transient\" (aka non-recoverable) connection problem and is used to indicate an immediate stop to resolution attempts. All other exceptions result in!
a warning and continuing on to the next resolver."
+msgstr "从版本 3.3 开始,Hibernate 有了更为强大的自动决定应该使用哪个方言的方法,这根据一系列实现 <interfacename>org.hibernate.dialect.resolver.DialectResolver</interfacename> 接口的代理,它们只定义一个方法:<programlisting role=\"JAVA\">public Dialect resolveDialect(DatabaseMetaData metaData) throws JDBCConnectionException</programlisting>。这里的基本合约是如果解析者(resolver)“理解”给点数据库的元数据并返回对应的方言;否则返回 null 并使用下一个解析者。这个签名也指定可能抛出的异常 <exceptionname>org.hibernate.exception.JDBCConnectionException</exceptionname>。这里的 JDBCConnectionException 被认为是“非瞬时的”(也就是不可恢复的)连接问题且指示立即终止解析。所有其他的异常都导致警告发出并使用下一个解析者。\""
#. Tag: para
#, no-c-format
-msgid ""
-"The cool part about these resolvers is that users can also register their "
-"own custom resolvers which will be processed ahead of the built-in Hibernate "
-"ones. This might be useful in a number of different situations: it allows "
-"easy integration for auto-detection of dialects beyond those shipped with "
-"HIbernate itself; it allows you to specify to use a custom dialect when a "
-"particular database is recognized; etc. To register one or more resolvers, "
-"simply specify them (seperated by commas, tabs or spaces) using the "
-"'hibernate.dialect_resolvers' configuration setting (see the "
-"<constant>DIALECT_RESOLVERS</constant> constant on <classname>org.hibernate."
-"cfg.Environment</classname>)."
-msgstr ""
-"这些解析者最棒的功能是用户也可以注册自定义的解析者,它们将在内置的解析者之前"
-"被调用。在许多情况下这可能很有用:它可以轻易地集成内置方言之外的方言的自动检"
-"测;它让你可以使用自定义的方言等。要注册一个或多个解析者,只要用 'hibernate."
-"dialect_resolvers' 配置设置指定它们(由逗号、制表符或空格隔开)就可以了(请参"
-"考 <classname>org.hibernate.cfg.Environment</classname> 上的 "
-"<constant>DIALECT_RESOLVERS</constant>)。"
+msgid "The cool part about these resolvers is that users can also register their own custom resolvers which will be processed ahead of the built-in Hibernate ones. This might be useful in a number of different situations: it allows easy integration for auto-detection of dialects beyond those shipped with HIbernate itself; it allows you to specify to use a custom dialect when a particular database is recognized; etc. To register one or more resolvers, simply specify them (seperated by commas, tabs or spaces) using the 'hibernate.dialect_resolvers' configuration setting (see the <constant>DIALECT_RESOLVERS</constant> constant on <classname>org.hibernate.cfg.Environment</classname>)."
+msgstr "这些解析者最棒的功能是用户也可以注册自定义的解析者,它们将在内置的解析者之前被调用。在许多情况下这可能很有用:它可以轻易地集成内置方言之外的方言的自动检测;它让你可以使用自定义的方言等。要注册一个或多个解析者,只要用 'hibernate.dialect_resolvers' 配置设置指定它们(由逗号、制表符或空格隔开)就可以了(请参考 <classname>org.hibernate.cfg.Environment</classname> 上的 <constant>DIALECT_RESOLVERS</constant>)。"
#. Tag: title
#, no-c-format
@@ -155,91 +70,44 @@
msgstr "标识符的生成"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"When considering portability between databases, another important decision "
-"is selecting the identifier generation stratagy you want to use. Originally "
-"Hibernate provided the <emphasis>native</emphasis> generator for this "
-"purpose, which was intended to select between a <emphasis>sequence</"
-"emphasis>, <emphasis>identity</emphasis>, or <emphasis>table</emphasis> "
-"strategy depending on the capability of the underlying database. However, an "
-"insidious implication of this approach comes about when targtetting some "
-"databases which support <emphasis>identity</emphasis> generation and some "
-"which do not. <emphasis>identity</emphasis> generation relies on the SQL "
-"definition of an IDENTITY (or auto-increment) column to manage the "
-"identifier value; it is what is known as a post-insert generation strategy "
-"becauase the insert must actually happen before we can know the identifier "
-"value. Because Hibernate relies on this identifier value to uniquely "
-"reference entities within a persistence context it must then issue the "
-"insert immediately when the users requests the entitiy be associated with "
-"the session (like via save() e.g.) regardless of current transactional "
-"semantics."
-msgstr ""
-"当考虑数据库的移植性时,另外一个重要的考量是选择标识符生成策略。Hibernate 原"
-"先提供的 <emphasis>native</emphasis> 生成器的目的是根据底层数据库的能力在 "
-"<emphasis>sequence</emphasis>、<emphasis>identity</emphasis> 或 "
-"<emphasis>table</emphasis> 策略间进行选择。然而,这个方法一个潜在的问题是有些"
-"数据库支持<emphasis>标识符(identity)</emphasis>生成而有些则不支持。"
-"<emphasis>标识符(identity)</emphasis> 生成依赖于管理标识符值的 IDENTITY"
-"(或 auto-increment)字段的 SQL 定义。它也成为 post-insert 生成策略,因为 "
-"insert 必须在知道标识符值后才能实际发生。因为 Hibernate 依赖于这个标识符值来"
-"唯一地引用持久性上下文里的实体,当用户请求和会话相关联的实体时,它必须立即执"
-"行 insert 语句而不管当前的事务性语义。底层的问题是应用程序的语义在这些情况下"
-"自身也会发生改变。"
+#, no-c-format
+msgid "When considering portability between databases, another important decision is selecting the identifier generation stratagy you want to use. Originally Hibernate provided the <emphasis>native</emphasis> generator for this purpose, which was intended to select between a <emphasis>sequence</emphasis>, <emphasis>identity</emphasis>, or <emphasis>table</emphasis> strategy depending on the capability of the underlying database. However, an insidious implication of this approach comes about when targtetting some databases which support <emphasis>identity</emphasis> generation and some which do not. <emphasis>identity</emphasis> generation relies on the SQL definition of an IDENTITY (or auto-increment) column to manage the identifier value; it is what is known as a post-insert generation strategy becauase the insert must actually happen before we can know the identifier value. Because Hibernate relies on this identifier value to uniquely reference entities within a persisten!
ce context it must then issue the insert immediately when the users requests the entitiy be associated with the session (like via save() e.g.) regardless of current transactional semantics."
+msgstr "当考虑数据库的移植性时,另外一个重要的考量是选择标识符生成策略。Hibernate 原先提供的 <emphasis>native</emphasis> 生成器的目的是根据底层数据库的能力在 <emphasis>sequence</emphasis>、<emphasis>identity</emphasis> 或 <emphasis>table</emphasis> 策略间进行选择。然而,这个方法一个潜在的问题是有些数据库支持<emphasis>标识符(identity)</emphasis>生成而有些则不支持。<emphasis>标识符(identity)</emphasis> 生成依赖于管理标识符值的 IDENTITY(或 auto-increment)字段的 SQL 定义。它也成为 post-insert 生成策略,因为 insert 必须在知道标识符值后才能实际发生。因为 Hibernate 依赖于这个标识符值来唯一地引用持久性上下文里的实体,当用户请求和会话相关联的实体时(如通过 save()),它必须立即执行 insert 语句而不管当前的事务性语义。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Hibernate was changed slightly once the implication of this was better "
-"understood so that the insert is delayed in cases where that is feasible."
-msgstr "Hibernate 已经进行了改进,所以在可行时这种插入会被延迟。"
+#, no-c-format
+msgid "Hibernate was changed slightly once the implication of this was better understood so that the insert is delayed in cases where that is feasible."
+msgstr "Hibernate 已经进行了轻微改进,所以在可行时这种插入会被延迟。"
#. Tag: note
#, no-c-format
-msgid ""
-"The underlying issue is that the actual semanctics of the application itself "
-"changes in these cases."
-msgstr ""
+msgid "The underlying issue is that the actual semanctics of the application itself changes in these cases."
+msgstr "底层的问题是这些例子里应用程序自身的实际模式的改变。"
#. Tag: para
#, no-c-format
-msgid ""
-"Starting with version 3.2.3, Hibernate comes with a set of <ulink url="
-"\"http://in.relation.to/2082.lace\">enhanced</ulink> identifier generators "
-"targetting portability in a much different way."
-msgstr ""
-"从 3.2.3 版本开始,Hibernate 带有一套 <ulink url=\"http://in.relation."
-"to/2082.lace\">enhanced</ulink> 标识符生成器,它以很不同的方式实现移植性。"
+msgid "Starting with version 3.2.3, Hibernate comes with a set of <ulink url=\"http://in.relation.to/2082.lace\">enhanced</ulink> identifier generators targetting portability in a much different way."
+msgstr "从 3.2.3 版本开始,Hibernate 带有一套 <ulink url=\"http://in.relation.to/2082.lace\">enhanced</ulink> 标识符生成器,它以很不同的方式实现移植性。"
#. Tag: para
#, no-c-format
-msgid ""
-"There are specifically 2 bundled <emphasis>enhanced</emphasis>generators:"
+msgid "There are specifically 2 bundled <emphasis>enhanced</emphasis>generators:"
msgstr "特别是两个捆绑的 <emphasis>enhanced</emphasis> 生成器:"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname>"
-msgstr ""
-"<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname>"
+msgstr "<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname>"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<classname>org.hibernate.id.enhanced.TableGenerator</classname>"
msgstr "<classname>org.hibernate.id.enhanced.TableGenerator</classname>"
#. Tag: note
#, no-c-format
-msgid ""
-"The idea behind these generators is to port the actual semantics of the "
-"identifer value generation to the different databases. For example, the "
-"<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname> "
-"mimics the behavior of a sequence on databases which do not support "
-"sequences by using a table."
-msgstr ""
-"这些生成器背后的概念是把标识符值生成的实际情景移植到不同的数据库里。例如,"
-"<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname> 通过"
-"使用表来模拟不支持序列(sequences)的数据库上的序列行为。"
+msgid "The idea behind these generators is to port the actual semantics of the identifer value generation to the different databases. For example, the <classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname> mimics the behavior of a sequence on databases which do not support sequences by using a table."
+msgstr "这些生成器背后的概念是把标识符值生成的实际情景移植到不同的数据库里。例如,<classname>org.hibernate.id.enhanced.SequenceStyleGenerator</classname> 通过使用表来模拟不支持序列(sequences)的数据库上的序列行为。"
#. Tag: title
#, no-c-format
@@ -248,47 +116,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This is an area in Hibernate in need of improvement. In terms of portability "
-"concerns, this function handling currently works pretty well from HQL; "
-"however, it is quite lacking in all other aspects."
-msgstr ""
+msgid "This is an area in Hibernate in need of improvement. In terms of portability concerns, this function handling currently works pretty well from HQL; however, it is quite lacking in all other aspects."
+msgstr "这是 Hibernate 需要提高的一个领域。从可移植性来说,这个功能可以很好地处理 HQL 的内容,但在其他方面就有所欠缺。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"SQL functions can be referenced in many ways by users. However, not all "
-"databases support the same set of functions. Hibernate, provides a means of "
-"mapping a <emphasis>logical</emphasis> function name to a delegate which "
-"knows how to render that particular function, perhaps even using a totally "
-"different physical function call."
-msgstr ""
-"用户可以以多种方式引用 SQL 函数。然而,不是所有的数据库都支持相同的函数集。"
-"Hibernate 提供了一种映射<emphasis>逻辑</emphasis>函数名到代理的方法,这个代理"
-"知道如何解析特定的函数,甚至可能使用完全不同的物理函数调用。"
+#, no-c-format
+msgid "SQL functions can be referenced in many ways by users. However, not all databases support the same set of functions. Hibernate, provides a means of mapping a <emphasis>logical</emphasis> function name to a delegate which knows how to render that particular function, perhaps even using a totally different physical function call."
+msgstr "用户可以以多种方式引用 SQL 函数。然而,不是所有的数据库都支持相同的函数集。Hibernate 提供了一种映射<emphasis>逻辑</emphasis>函数名到代理的方法,这个代理知道如何解析特定的函数,甚至可能使用完全不同的物理函数调用。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Technically this function registration is handled through the <classname>org."
-"hibernate.dialect.function.SQLFunctionRegistry</classname> class which is "
-"intended to allow users to provide custom function definitions without "
-"having to provide a custom dialect. This specific behavior is not fully "
-"completed as of yet."
-msgstr ""
-"从技术上来讲,这个函数注册是通过 <classname>org.hibernate.dialect.function."
-"SQLFunctionRegistry</classname> 类进行处理的,它的目的是允许用户提供自定义的"
-"函数定义而无需提供自定义的方言。这种特殊的行为目前还未全部开发完毕。"
+msgid "Technically this function registration is handled through the <classname>org.hibernate.dialect.function.SQLFunctionRegistry</classname> class which is intended to allow users to provide custom function definitions without having to provide a custom dialect. This specific behavior is not fully completed as of yet."
+msgstr "从技术上来讲,这个函数注册是通过 <classname>org.hibernate.dialect.function.SQLFunctionRegistry</classname> 类进行处理的,它的目的是允许用户提供自定义的函数定义而无需提供自定义的方言。这种特殊的行为目前还未全部开发完毕。"
#. Tag: para
#, no-c-format
-msgid ""
-"It is sort of implemented such that users can programatically register "
-"functions with the <classname>org.hibernate.cfg.Configuration</classname> "
-"and those functions will be recognized for HQL."
-msgstr ""
-"其中一些功能已经实现,如用户可以在程序里用 <classname>org.hibernate.cfg."
-"Configuration</classname> 注册函数且这些函数可被 HQL 识别。"
+msgid "It is sort of implemented such that users can programatically register functions with the <classname>org.hibernate.cfg.Configuration</classname> and those functions will be recognized for HQL."
+msgstr "其中一些功能已经实现,如用户可以在程序里用 <classname>org.hibernate.cfg.Configuration</classname> 注册函数且这些函数可被 HQL 识别。"
#. Tag: title
#, no-c-format
@@ -304,3 +148,4 @@
#~ "This is a new area in Hibernate and as such it is not as mature as the "
#~ "overall Hibernate experience."
#~ msgstr "这是 Hibernate 的一个新的领域,暂时还不如 Hibernate 总体那么成熟。"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/preface.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/preface.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/preface.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:15\n"
-"PO-Revision-Date: 2010-01-11 10:44+1000\n"
+"PO-Revision-Date: 2010-03-16 10:12+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -20,108 +20,44 @@
msgstr "前言"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Working with object-oriented software and a relational database can be "
-"cumbersome and time consuming in today's enterprise environments. Hibernate "
-"is an Object/Relational Mapping tool for Java environments. The term Object/"
-"Relational Mapping (ORM) refers to the technique of mapping a data "
-"representation from an object model to a relational data model with a SQL-"
-"based schema."
-msgstr ""
-"在今日的企业环境中,把面向对象的软件和关系型数据库一起使用可能是相当麻烦和浪"
-"费时间的。Hibernate 是一个面向 Java 环境的对象/关系型数据库映射工具。对象/关"
-"系型数据库映射(object/relational mapping,ORM)这个术语表示一种技术,用来把"
-"对象模型表示的对象映射到基于 SQL 的关系模型数据结构中去。"
+#, no-c-format
+msgid "Working with object-oriented software and a relational database can be cumbersome and time consuming in today's enterprise environments. Hibernate is an Object/Relational Mapping tool for Java environments. The term Object/Relational Mapping (ORM) refers to the technique of mapping a data representation from an object model to a relational data model with a SQL-based schema."
+msgstr "在今日的企业环境中,把面向对象的软件和关系型数据库一起使用可能是相当麻烦和浪费时间的。Hibernate 是一个面向 Java 环境的对象/关系型数据库映射工具。对象/关系型数据库映射(object/relational mapping,ORM)这个术语表示一种技术,用来把对象模型表示的对象映射到基于 SQL 的关系模型数据结构中去。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate not only takes care of the mapping from Java classes to database "
-"tables (and from Java data types to SQL data types), but also provides data "
-"query and retrieval facilities. It can also significantly reduce development "
-"time otherwise spent with manual data handling in SQL and JDBC."
-msgstr ""
-"Hibernate 不仅管理 Java 类到数据库表的映射(包括 Java 数据类型到 SQL 数据类型"
-"的映射),还提供数据查询和获取数据的方法,可以大幅度减少开发时对人工使用 SQL "
-"和 JDBC 处理数据的时间。 "
+msgid "Hibernate not only takes care of the mapping from Java classes to database tables (and from Java data types to SQL data types), but also provides data query and retrieval facilities. It can also significantly reduce development time otherwise spent with manual data handling in SQL and JDBC."
+msgstr "Hibernate 不仅管理 Java 类到数据库表的映射(包括 Java 数据类型到 SQL 数据类型的映射),还提供数据查询和获取数据的方法,可以大幅度减少开发时对人工使用 SQL 和 JDBC 处理数据的时间。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate's goal is to relieve the developer from 95 percent of common data "
-"persistence related programming tasks. Hibernate may not be the best "
-"solution for data-centric applications that only use stored-procedures to "
-"implement the business logic in the database, it is most useful with object-"
-"oriented domain models and business logic in the Java-based middle-tier. "
-"However, Hibernate can certainly help you to remove or encapsulate vendor-"
-"specific SQL code and will help with the common task of result set "
-"translation from a tabular representation to a graph of objects."
-msgstr ""
-"Hibernate 的目标是对于开发者通常的数据持久化相关的编程任务,解放其中的 95%。"
-"对于以数据为中心的程序来说,它们往往只在数据库中使用存储过程来实现商业逻辑,"
-"Hibernate 可能不是最好的解决方案;对于那些在基于 Java 的中间层应用中,它们实"
-"现面向对象的业务模型和商业逻辑的应用,Hibernate 是最有用的。不管怎样,"
-"Hibernate 一定可以帮助你消除或者包装那些针对特定厂商的 SQL 代码,而且帮助你结"
-"果集从表格式的表示形式转换到一系列的对象中去。 "
+msgid "Hibernate's goal is to relieve the developer from 95 percent of common data persistence related programming tasks. Hibernate may not be the best solution for data-centric applications that only use stored-procedures to implement the business logic in the database, it is most useful with object-oriented domain models and business logic in the Java-based middle-tier. However, Hibernate can certainly help you to remove or encapsulate vendor-specific SQL code and will help with the common task of result set translation from a tabular representation to a graph of objects."
+msgstr "Hibernate 的目标是对于开发者通常的数据持久化相关的编程任务,解放其中的 95%。对于以数据为中心的程序来说,它们往往只在数据库中使用存储过程来实现商业逻辑,Hibernate 可能不是最好的解决方案;对于那些在基于 Java 的中间层应用中,它们实现面向对象的业务模型和商业逻辑的应用,Hibernate 是最有用的。不管怎样,Hibernate 一定可以帮助你消除或者包装那些针对特定厂商的 SQL 代码,而且帮助你结果集从表格式的表示形式转换到一系列的对象中去。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you are new to Hibernate and Object/Relational Mapping or even Java, "
-"please follow these steps:"
-msgstr ""
-"如果你对 Hibernate 和对象/关系型数据库映射还是个新手,甚至对 Java 也不熟悉,"
-"请按照下面的步骤来学习。"
+msgid "If you are new to Hibernate and Object/Relational Mapping or even Java, please follow these steps:"
+msgstr "如果你对 Hibernate 和对象/关系型数据库映射还是个新手,甚至对 Java 也不熟悉,请按照下面的步骤来学习。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Read <xref linkend=\"tutorial\" /> for a tutorial with step-by-step "
-"instructions. The source code for the tutorial is included in the "
-"distribution in the <literal>doc/reference/tutorial/</literal> directory."
-msgstr ""
-"阅读 <xref linkend=\"tutorial\"/>,这是一篇包含详细的逐步知道的指南。本指南的"
-"源代码包含在发行包里,你可以在 <literal>doc/reference/tutorial/</literal> 目"
-"录下找到。 "
+#, no-c-format
+msgid "Read <xref linkend=\"tutorial\" /> for a tutorial with step-by-step instructions. The source code for the tutorial is included in the distribution in the <literal>doc/reference/tutorial/</literal> directory."
+msgstr "阅读 <xref linkend=\"tutorial\"/>,这是一篇包含详细的逐步知道的指南。本指南的源代码包含在发行包里,你可以在 <literal>doc/reference/tutorial/</literal> 目录下找到。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Read <xref linkend=\"architecture\" /> to understand the environments where "
-"Hibernate can be used."
-msgstr ""
-"阅读 <xref linkend=\"architecture\"/> 来理解 Hibernate 可以使用的环境。 "
+#, no-c-format
+msgid "Read <xref linkend=\"architecture\" /> to understand the environments where Hibernate can be used."
+msgstr "阅读 <xref linkend=\"architecture\"/> 来理解 Hibernate 可以使用的环境。"
#. Tag: para
#, no-c-format
-msgid ""
-"View the <literal>eg/</literal> directory in the Hibernate distribution. It "
-"contains a simple standalone application. Copy your JDBC driver to the "
-"<literal>lib/</literal> directory and edit <literal>etc/hibernate."
-"properties</literal>, specifying correct values for your database. From a "
-"command prompt in the distribution directory, type <literal>ant eg</literal> "
-"(using Ant), or under Windows, type <literal>build eg</literal>."
-msgstr ""
-"查看 Hibernate 发行包中的 <literal>eg/</literal> 目录,里面有个一简单的独立运"
-"行的程序。把你的 JDBC 驱动复制到 <literal>lib/</literal> 目录并修改一下 "
-"<literal>etc/hibernate.properties</literal>,指定数据库的信息。然后进入命令"
-"行,切换到发行包的目录,输入 <literal>ant eg</literal>(使用 Ant),或者在 "
-"Windows 系统下使用 <literal>build eg</literal>。 "
+msgid "View the <literal>eg/</literal> directory in the Hibernate distribution. It contains a simple standalone application. Copy your JDBC driver to the <literal>lib/</literal> directory and edit <literal>etc/hibernate.properties</literal>, specifying correct values for your database. From a command prompt in the distribution directory, type <literal>ant eg</literal> (using Ant), or under Windows, type <literal>build eg</literal>."
+msgstr "查看 Hibernate 发行包中的 <literal>eg/</literal> 目录,里面有个一简单的独立运行的程序。把你的 JDBC 驱动复制到 <literal>lib/</literal> 目录并修改一下 <literal>etc/hibernate.properties</literal>,指定数据库的信息。然后进入命令行,切换到发行包的目录,输入 <literal>ant eg</literal>(使用 Ant),或者在 Windows 系统下使用 <literal>build eg</literal>。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Use this reference documentation as your primary source of information. "
-"Consider reading <biblioref linkend=\"biblio-JPwH\"></biblioref> if you need "
-"more help with application design, or if you prefer a step-by-step tutorial. "
-"Also visit <ulink url=\"http://caveatemptor.hibernate.org\" /> and download "
-"the example application from <biblioref linkend=\"biblio-JPwH\"></biblioref>."
-msgstr ""
-"把这份文档作为你学习的主要信息来源。如果你需要应用程序设计方面的帮助或者你希"
-"望有一个按部就班的指南,你可以考虑阅读 <emphasis>Hibernate in Action</"
-"emphasis>(http://www.manning.com/bauer)。你也可以访问http://caveatemptor."
-"hibernate.org 并下载 Hibernate 的示例程序。 "
+#, no-c-format
+msgid "Use this reference documentation as your primary source of information. Consider reading <biblioref linkend=\"biblio-JPwH\"></biblioref> if you need more help with application design, or if you prefer a step-by-step tutorial. Also visit <ulink url=\"http://caveatemptor.hibernate.org\" /> and download the example application from <biblioref linkend=\"biblio-JPwH\"></biblioref>."
+msgstr "把这份文档作为你学习的主要信息来源。如果你需要应用程序设计方面的帮助或者你希望有一个按部就班的指南,你可以考虑阅读 <biblioref linkend=\"biblio-JPwH\"></biblioref> 。你也可以访问 <ulink url=\"http://caveatemptor.hibernate.org\" /> 并从 <biblioref linkend=\"biblio-JPwH\"></biblioref> 下载示例程序。"
#. Tag: para
#, no-c-format
@@ -129,53 +65,27 @@
msgstr "在 Hibernate 网站上可以找到问题和解答(FAQ)。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Links to third party demos, examples, and tutorials are maintained on the "
-"Hibernate website."
-msgstr "在 Hibernate 网站上还有第三方的演示、示例和教程的链接。"
+#, no-c-format
+msgid "Links to third party demos, examples, and tutorials are maintained on the Hibernate website."
+msgstr "在 Hibernate 网站上还有第三方的演示、示例和教程的链接。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The Community Area on the Hibernate website is a good resource for design "
-"patterns and various integration solutions (Tomcat, JBoss AS, Struts, EJB, "
-"etc.)."
-msgstr ""
-"Hibernate 网站的社区是讨论关于设计模式以及很多整合方案(Tomcat、JBoss AS、"
-"Struts、EJB 等)的好地方。"
+msgid "The Community Area on the Hibernate website is a good resource for design patterns and various integration solutions (Tomcat, JBoss AS, Struts, EJB, etc.)."
+msgstr "Hibernate 网站的社区是讨论关于设计模式以及很多整合方案(Tomcat、JBoss AS、Struts、EJB 等)的好地方。"
#. Tag: para
#, no-c-format
-msgid ""
-"If you have questions, use the user forum linked on the Hibernate website. "
-"We also provide a JIRA issue tracking system for bug reports and feature "
-"requests. If you are interested in the development of Hibernate, join the "
-"developer mailing list. If you are interested in translating this "
-"documentation into your language, contact us on the developer mailing list."
-msgstr ""
-"如果你有任何问题,请使用 Hibernate 网站上链接的用户论坛。我们也提供一个 JIRA "
-"问题追踪系统,来搜集 bug 报告和新的功能请求。如果对开发 Hibernate 有兴趣,请"
-"加入开发者的邮件列表。如果你对翻译本文档感兴趣,请通过开发者的邮件列表来联系"
-"我们。 "
+msgid "If you have questions, use the user forum linked on the Hibernate website. We also provide a JIRA issue tracking system for bug reports and feature requests. If you are interested in the development of Hibernate, join the developer mailing list. If you are interested in translating this documentation into your language, contact us on the developer mailing list."
+msgstr "如果你有任何问题,请使用 Hibernate 网站上链接的用户论坛。我们也提供一个 JIRA 问题追踪系统,来搜集 bug 报告和新的功能请求。如果对开发 Hibernate 有兴趣,请加入开发者的邮件列表。如果你对翻译本文档感兴趣,请通过开发者的邮件列表来联系我们。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Commercial development support, production support, and training for "
-"Hibernate is available through JBoss Inc. (see http://www.hibernate.org/"
-"SupportTraining/). Hibernate is a Professional Open Source project and a "
-"critical component of the JBoss Enterprise Middleware System (JEMS) suite of "
-"products."
-msgstr ""
-"商业开发、产品支持和 Hibernate 培训可以通过 JBoss Inc. 获得(请查阅: http://"
-"www.hibernate.org/SupportTraining/)。Hibernate 是一个专业的开源项目,也是 "
-"JBoss 企业级中间件系统(JBoss Enterprise Middleware System,JEMS)里的一个核"
-"心组件。 "
+msgid "Commercial development support, production support, and training for Hibernate is available through JBoss Inc. (see http://www.hibernate.org/SupportTraining/). Hibernate is a Professional Open Source project and a critical component of the JBoss Enterprise Middleware System (JEMS) suite of products."
+msgstr "商业开发、产品支持和 Hibernate 培训可以通过 JBoss Inc. 获得(请查阅: http://www.hibernate.org/SupportTraining/)。Hibernate 是一个专业的开源项目,也是 JBoss 企业级中间件系统(JBoss Enterprise Middleware System,JEMS)里的一个核心组件。 "
#~ msgid "Feedback"
#~ msgstr "反馈"
-
#~ msgid ""
#~ "Use <ulink url=\"http://opensource.atlassian.com/projects/hibernate"
#~ "\">Hibernate JIRA</ulink> to report errors or request enhacements to this "
@@ -183,3 +93,4 @@
#~ msgstr ""
#~ "用 <ulink url=\"http://opensource.atlassian.com/projects/hibernate"
#~ "\">Hibernate JIRA</ulink> 来报告错误或改进本文档。"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_criteria.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_criteria.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_criteria.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:15\n"
-"PO-Revision-Date: 2009-12-07 20:31+1000\n"
+"PO-Revision-Date: 2010-03-16 09:53+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -31,13 +31,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The interface <literal>org.hibernate.Criteria</literal> represents a query "
-"against a particular persistent class. The <literal>Session</literal> is a "
-"factory for <literal>Criteria</literal> instances."
-msgstr ""
-"<literal>org.hibernate.Criteria</literal >接口表示特定持久类的一个查询。"
-"<literal>Session</literal> 是 <literal>Criteria</literal> 实例的工厂。"
+msgid "The interface <literal>org.hibernate.Criteria</literal> represents a query against a particular persistent class. The <literal>Session</literal> is a factory for <literal>Criteria</literal> instances."
+msgstr "<literal>org.hibernate.Criteria</literal >接口表示特定持久类的一个查询。<literal>Session</literal> 是 <literal>Criteria</literal> 实例的工厂。"
#. Tag: title
#, no-c-format
@@ -46,15 +41,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"An individual query criterion is an instance of the interface <literal>org."
-"hibernate.criterion.Criterion</literal>. The class <literal>org.hibernate."
-"criterion.Restrictions</literal> defines factory methods for obtaining "
-"certain built-in <literal>Criterion</literal> types."
-msgstr ""
-"一个单独的查询条件是 <literal>org.hibernate.criterion.Criterion</literal> 接"
-"口的一个实例。<literal>org.hibernate.criterion.Restrictions</literal> 类定义"
-"了获得某些内置 <literal>Criterion</literal> 类型的工厂方法。"
+msgid "An individual query criterion is an instance of the interface <literal>org.hibernate.criterion.Criterion</literal>. The class <literal>org.hibernate.criterion.Restrictions</literal> defines factory methods for obtaining certain built-in <literal>Criterion</literal> types."
+msgstr "一个单独的查询条件是 <literal>org.hibernate.criterion.Criterion</literal> 接口的一个实例。<literal>org.hibernate.criterion.Restrictions</literal> 类定义了获得某些内置 <literal>Criterion</literal> 类型的工厂方法。"
#. Tag: para
#, no-c-format
@@ -63,30 +51,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"There are a range of built-in criterion types (<literal>Restrictions</"
-"literal> subclasses). One of the most useful allows you to specify SQL "
-"directly."
-msgstr ""
-"Hibernate 提供了相当多的内置 criterion 类型(<literal>Restrictions</literal> "
-"子类),但是尤其有用的是可以允许你直接使用 SQL。 "
+msgid "There are a range of built-in criterion types (<literal>Restrictions</literal> subclasses). One of the most useful allows you to specify SQL directly."
+msgstr "Hibernate 提供了相当多的内置 criterion 类型(<literal>Restrictions</literal> 子类),但是尤其有用的是可以允许你直接使用 SQL。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>{alias}</literal> placeholder with be replaced by the row alias "
-"of the queried entity."
+msgid "The <literal>{alias}</literal> placeholder with be replaced by the row alias of the queried entity."
msgstr "<literal>{alias}</literal> 占位符应当被替换为被查询实体的列别名。"
#. Tag: para
#, no-c-format
-msgid ""
-"You can also obtain a criterion from a <literal>Property</literal> instance. "
-"You can create a <literal>Property</literal> by calling <literal>Property."
-"forName()</literal>:"
-msgstr ""
-"<literal>Property</literal> 实例是获得一个条件的另外一种途径。你可以通过调用 "
-"<literal>Property.forName()</literal> 创建一个 <literal>Property</literal>:"
+msgid "You can also obtain a criterion from a <literal>Property</literal> instance. You can create a <literal>Property</literal> by calling <literal>Property.forName()</literal>:"
+msgstr "<literal>Property</literal> 实例是获得一个条件的另外一种途径。你可以通过调用 <literal>Property.forName()</literal> 创建一个 <literal>Property</literal>:"
#. Tag: title
#, no-c-format
@@ -95,12 +71,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can order the results using <literal>org.hibernate.criterion.Order</"
-"literal>."
-msgstr ""
-"你可以使用 <literal>org.hibernate.criterion.Order</literal> 来为查询结果排"
-"序。 "
+msgid "You can order the results using <literal>org.hibernate.criterion.Order</literal>."
+msgstr "你可以使用 <literal>org.hibernate.criterion.Order</literal> 来为查询结果排序。 "
#. Tag: title
#, no-c-format
@@ -109,85 +81,53 @@
#. Tag: para
#, no-c-format
-msgid ""
-"By navigating associations using <literal>createCriteria()</literal> you can "
-"specify constraints upon related entities:"
-msgstr ""
-"通过使用 <literal>createCriteria()</literal> 对关联进行导航,你可以指定相关实"
-"体的约束。"
+msgid "By navigating associations using <literal>createCriteria()</literal> you can specify constraints upon related entities:"
+msgstr "通过使用 <literal>createCriteria()</literal> 对关联进行导航,你可以指定相关实体的约束。"
#. Tag: para
#, no-c-format
-msgid ""
-"The second <literal>createCriteria()</literal> returns a new instance of "
-"<literal>Criteria</literal> that refers to the elements of the "
-"<literal>kittens</literal> collection."
-msgstr ""
-"注意第二个 <literal>createCriteria()</literal> 返回一个新的 "
-"<literal>Criteria</literal> 实例,该实例引用 <literal>kittens</literal> 集合"
-"中的元素。 "
+msgid "The second <literal>createCriteria()</literal> returns a new instance of <literal>Criteria</literal> that refers to the elements of the <literal>kittens</literal> collection."
+msgstr "注意第二个 <literal>createCriteria()</literal> 返回一个新的 <literal>Criteria</literal> 实例,该实例引用 <literal>kittens</literal> 集合中的元素。 "
#. Tag: para
#, no-c-format
-msgid ""
-"There is also an alternate form that is useful in certain circumstances:"
+msgid "There is also an alternate form that is useful in certain circumstances:"
msgstr "接下来,替换形态在某些情况下也是很有用的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"(<literal>createAlias()</literal> does not create a new instance of "
-"<literal>Criteria</literal>.)"
-msgstr ""
-"(<literal>createAlias()</literal> 并不创建一个新的 <literal>Criteria</"
-"literal> 实例。)"
+msgid "(<literal>createAlias()</literal> does not create a new instance of <literal>Criteria</literal>.)"
+msgstr "(<literal>createAlias()</literal> 并不创建一个新的 <literal>Criteria</literal> 实例。)"
#. Tag: para
#, no-c-format
-msgid ""
-"The kittens collections held by the <literal>Cat</literal> instances "
-"returned by the previous two queries are <emphasis>not</emphasis> pre-"
-"filtered by the criteria. If you want to retrieve just the kittens that "
-"match the criteria, you must use a <literal>ResultTransformer</literal>."
-msgstr ""
-"<literal>Cat</literal> 实例所保存的之前两次查询所返回的 kittens 集合是 "
-"<emphasis>没有</emphasis>被条件预过滤的。如果你希望只获得符合条件的 kittens,"
-"你必须使用 <literal>ResultTransformer</literal>。 "
+msgid "The kittens collections held by the <literal>Cat</literal> instances returned by the previous two queries are <emphasis>not</emphasis> pre-filtered by the criteria. If you want to retrieve just the kittens that match the criteria, you must use a <literal>ResultTransformer</literal>."
+msgstr "<literal>Cat</literal> 实例所保存的之前两次查询所返回的 kittens 集合是 <emphasis>没有</emphasis>被条件预过滤的。如果你希望只获得符合条件的 kittens,你必须使用 <literal>ResultTransformer</literal>。 "
#. Tag: para
#, no-c-format
msgid "Additionally you may manipulate the result set using a left outer join:"
-msgstr ""
+msgstr "此外,你可以用一个 left outer join 来操纵结果集:"
#. Tag: para
#, no-c-format
-msgid ""
-"This will return all of the <literal>Cat</literal>s with a mate whose name "
-"starts with \"good\" ordered by their mate's age, and all cats who do not "
-"have a mate. This is useful when there is a need to order or limit in the "
-"database prior to returning complex/large result sets, and removes many "
-"instances where multiple queries would have to be performed and the results "
-"unioned by java in memory."
-msgstr ""
+msgid "This will return all of the <literal>Cat</literal>s with a mate whose name starts with \"good\" ordered by their mate's age, and all cats who do not have a mate. This is useful when there is a need to order or limit in the database prior to returning complex/large result sets, and removes many instances where multiple queries would have to be performed and the results unioned by java in memory."
+msgstr "这将返回配偶的名字以 \"good\" 起始的所有 <literal>Cat</literal>,并根据其配偶的年龄进行排序。当需要在返回复杂/大型结果集前进行排序或限制、在多个查询必须执行且结果通过 Java 在内存里组合从而删除许多实例时,这很有用。"
#. Tag: para
#, no-c-format
-msgid ""
-"Without this feature, first all of the cats without a mate would need to be "
-"loaded in one query."
-msgstr ""
+msgid "Without this feature, first all of the cats without a mate would need to be loaded in one query."
+msgstr "如果没有这个功能,那么没有配偶的猫就需要在一次查询里进行加载。"
#. Tag: para
#, no-c-format
-msgid ""
-"A second query would need to retreive the cats with mates who's name started "
-"with \"good\" sorted by the mates age."
-msgstr ""
+msgid "A second query would need to retreive the cats with mates who's name started with \"good\" sorted by the mates age."
+msgstr "第二个查询将需要获取配偶名以 \"good\" 起始并按照配偶年龄排序的猫。"
#. Tag: para
#, no-c-format
msgid "Thirdly, in memory; the lists would need to be joined manually."
-msgstr ""
+msgstr "第三点,列表需要在内存中进行手工联合。"
#. Tag: title
#, no-c-format
@@ -196,22 +136,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can specify association fetching semantics at runtime using "
-"<literal>setFetchMode()</literal>."
-msgstr ""
-"你可以使用 <literal>setFetchMode()</literal> 在运行时定义动态关联抓取的语"
-"义。 "
+msgid "You can specify association fetching semantics at runtime using <literal>setFetchMode()</literal>."
+msgstr "你可以使用 <literal>setFetchMode()</literal> 在运行时定义动态关联抓取的语义。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"This query will fetch both <literal>mate</literal> and <literal>kittens</"
-"literal> by outer join. See <xref linkend=\"performance-fetching\" /> for "
-"more information."
-msgstr ""
-"这个查询可以通过外连接抓取 <literal>mate</literal> 和 <literal>kittens</"
-"literal>。查看 <xref linkend=\"performance-fetching\" /> 可以获得更多信息。"
+#, no-c-format
+msgid "This query will fetch both <literal>mate</literal> and <literal>kittens</literal> by outer join. See <xref linkend=\"performance-fetching\" /> for more information."
+msgstr "这个查询可以通过外连接抓取 <literal>mate</literal> 和 <literal>kittens</literal>。查看 <xref linkend=\"performance-fetching\" /> 可以获得更多信息。 "
#. Tag: title
#, no-c-format
@@ -220,18 +151,12 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The class <literal>org.hibernate.criterion.Example</literal> allows you to "
-"construct a query criterion from a given instance."
-msgstr ""
-"<literal>org.hibernate.criterion.Example</literal> 类允许你通过一个给定实例构"
-"建一个条件查询。"
+msgid "The class <literal>org.hibernate.criterion.Example</literal> allows you to construct a query criterion from a given instance."
+msgstr "<literal>org.hibernate.criterion.Example</literal> 类允许你通过一个给定实例构建一个条件查询。"
#. Tag: para
#, no-c-format
-msgid ""
-"Version properties, identifiers and associations are ignored. By default, "
-"null valued properties are excluded."
+msgid "Version properties, identifiers and associations are ignored. By default, null valued properties are excluded."
msgstr "版本属性、标识符和关联被忽略。默认情况下值为 null 的属性将被排除。"
#. Tag: para
@@ -251,53 +176,27 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The class <literal>org.hibernate.criterion.Projections</literal> is a "
-"factory for <literal>Projection</literal> instances. You can apply a "
-"projection to a query by calling <literal>setProjection()</literal>."
-msgstr ""
-"<literal>org.hibernate.criterion.Projections</literal> 是 "
-"<literal>Projection</literal> 的实例工厂。我们通过调用 <literal>setProjection"
-"()</literal> 应用投影到一个查询。 "
+msgid "The class <literal>org.hibernate.criterion.Projections</literal> is a factory for <literal>Projection</literal> instances. You can apply a projection to a query by calling <literal>setProjection()</literal>."
+msgstr "<literal>org.hibernate.criterion.Projections</literal> 是 <literal>Projection</literal> 的实例工厂。我们通过调用 <literal>setProjection()</literal> 应用投影到一个查询。 "
#. Tag: para
#, no-c-format
-msgid ""
-"There is no explicit \"group by\" necessary in a criteria query. Certain "
-"projection types are defined to be <emphasis>grouping projections</"
-"emphasis>, which also appear in the SQL <literal>group by</literal> clause."
-msgstr ""
-"在一个条件查询中没有必要显式的使用 \"group by\" 。某些投影类型就是被定义为"
-"<emphasis>分组投影</emphasis>,他们也出现在 SQL 的 <literal>group by</"
-"literal> 子句中。"
+msgid "There is no explicit \"group by\" necessary in a criteria query. Certain projection types are defined to be <emphasis>grouping projections</emphasis>, which also appear in the SQL <literal>group by</literal> clause."
+msgstr "在一个条件查询中没有必要显式的使用 \"group by\" 。某些投影类型就是被定义为<emphasis>分组投影</emphasis>,他们也出现在 SQL 的 <literal>group by</literal> 子句中。"
#. Tag: para
#, no-c-format
-msgid ""
-"An alias can be assigned to a projection so that the projected value can be "
-"referred to in restrictions or orderings. Here are two different ways to do "
-"this:"
-msgstr ""
-"你可以选择把一个别名指派给一个投影,这样可以使投影值被约束或排序所引用。下面"
-"是两种不同的实现方式: "
+msgid "An alias can be assigned to a projection so that the projected value can be referred to in restrictions or orderings. Here are two different ways to do this:"
+msgstr "你可以选择把一个别名指派给一个投影,这样可以使投影值被约束或排序所引用。下面是两种不同的实现方式: "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>alias()</literal> and <literal>as()</literal> methods simply "
-"wrap a projection instance in another, aliased, instance of "
-"<literal>Projection</literal>. As a shortcut, you can assign an alias when "
-"you add the projection to a projection list:"
-msgstr ""
-"<literal>alias()</literal> 和 <literal>as()</literal> 方法简便的将一个投影实"
-"例包装到另外一个 别名的 <literal>Projection</literal> 实例中。简而言之,当你"
-"添加一个投影到一个投影列表中时你可以为它指定一个别名:"
+msgid "The <literal>alias()</literal> and <literal>as()</literal> methods simply wrap a projection instance in another, aliased, instance of <literal>Projection</literal>. As a shortcut, you can assign an alias when you add the projection to a projection list:"
+msgstr "<literal>alias()</literal> 和 <literal>as()</literal> 方法简便的将一个投影实例包装到另外一个 别名的 <literal>Projection</literal> 实例中。简而言之,当你添加一个投影到一个投影列表中时你可以为它指定一个别名:"
#. Tag: para
#, no-c-format
-msgid ""
-"You can also use <literal>Property.forName()</literal> to express "
-"projections:"
+msgid "You can also use <literal>Property.forName()</literal> to express projections:"
msgstr "你也可以使用 <literal>Property.forName()</literal> 来表示投影:"
#. Tag: title
@@ -307,24 +206,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>DetachedCriteria</literal> class allows you to create a query "
-"outside the scope of a session and then execute it using an arbitrary "
-"<literal>Session</literal>."
-msgstr ""
-"<literal>DetachedCriteria</literal> 类使你在一个 session 范围之外创建一个查"
-"询,并且可以使用任意的 <literal>Session</literal> 来执行它。 "
+msgid "The <literal>DetachedCriteria</literal> class allows you to create a query outside the scope of a session and then execute it using an arbitrary <literal>Session</literal>."
+msgstr "<literal>DetachedCriteria</literal> 类使你在一个 session 范围之外创建一个查询,并且可以使用任意的 <literal>Session</literal> 来执行它。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>DetachedCriteria</literal> can also be used to express a "
-"subquery. Criterion instances involving subqueries can be obtained via "
-"<literal>Subqueries</literal> or <literal>Property</literal>."
-msgstr ""
-"<literal>DetachedCriteria</literal> 也可以用以表示子查询。条件实例包含子查询"
-"可以通过 <literal>Subqueries</literal> 或者 <literal>Property</literal> 获"
-"得。 "
+msgid "A <literal>DetachedCriteria</literal> can also be used to express a subquery. Criterion instances involving subqueries can be obtained via <literal>Subqueries</literal> or <literal>Property</literal>."
+msgstr "<literal>DetachedCriteria</literal> 也可以用以表示子查询。条件实例包含子查询可以通过 <literal>Subqueries</literal> 或者 <literal>Property</literal> 获得。 "
#. Tag: para
#, no-c-format
@@ -338,45 +226,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"For most queries, including criteria queries, the query cache is not "
-"efficient because query cache invalidation occurs too frequently. However, "
-"there is a special kind of query where you can optimize the cache "
-"invalidation algorithm: lookups by a constant natural key. In some "
-"applications, this kind of query occurs frequently. The criteria API "
-"provides special provision for this use case."
-msgstr ""
-"对大多数查询,包括条件查询而言,因为查询缓存的失效(invalidation)发生得太频"
-"繁,查询缓存不是非常高效。然而,有一种特别的查询,可以通过不变的自然键优化缓"
-"存的失效算法。在某些应用中,这种类型的查询比较常见。条件查询 API 对这种用例提"
-"供了特别规约。 "
+msgid "For most queries, including criteria queries, the query cache is not efficient because query cache invalidation occurs too frequently. However, there is a special kind of query where you can optimize the cache invalidation algorithm: lookups by a constant natural key. In some applications, this kind of query occurs frequently. The criteria API provides special provision for this use case."
+msgstr "对大多数查询,包括条件查询而言,因为查询缓存的失效(invalidation)发生得太频繁,查询缓存不是非常高效。然而,有一种特别的查询,可以通过不变的自然键优化缓存的失效算法。在某些应用中,这种类型的查询比较常见。条件查询 API 对这种用例提供了特别规约。 "
#. Tag: para
#, no-c-format
-msgid ""
-"First, map the natural key of your entity using <literal><natural-id></"
-"literal> and enable use of the second-level cache."
-msgstr ""
-"首先,你应该对你的 entity 使用 <literal><natural-id></literal> 来映射自"
-"然键,然后打开第二级缓存。 "
+msgid "First, map the natural key of your entity using <literal><natural-id></literal> and enable use of the second-level cache."
+msgstr "首先,你应该对你的 entity 使用 <literal><natural-id></literal> 来映射自然键,然后打开第二级缓存。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This functionality is not intended for use with entities with "
-"<emphasis>mutable</emphasis> natural keys."
-msgstr ""
-"注意,此功能对具有<emphasis>mutable</emphasis>自然键的 entity 并不适用。 "
+msgid "This functionality is not intended for use with entities with <emphasis>mutable</emphasis> natural keys."
+msgstr "注意,此功能对具有<emphasis>mutable</emphasis>自然键的 entity 并不适用。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Once you have enabled the Hibernate query cache, the <literal>Restrictions."
-"naturalId()</literal> allows you to make use of the more efficient cache "
-"algorithm."
-msgstr ""
-"现在,我们可以用 <literal>Restrictions.naturalId()</literal> 来使用更加高效的"
-"缓存算法。 "
+msgid "Once you have enabled the Hibernate query cache, the <literal>Restrictions.naturalId()</literal> allows you to make use of the more efficient cache algorithm."
+msgstr "现在,我们可以用 <literal>Restrictions.naturalId()</literal> 来使用更加高效的缓存算法。 "
#, fuzzy
#~ msgid ""
@@ -884,3 +750,4 @@
#~ " .set(\"org\", \"hb\") \n"
#~ " ).setCacheable(true)\n"
#~ " .uniqueResult();"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_hql.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_hql.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_hql.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:15\n"
-"PO-Revision-Date: 2009-12-07 21:13+1000\n"
+"PO-Revision-Date: 2010-03-16 10:03+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,14 +21,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate uses a powerful query language (HQL) that is similar in appearance "
-"to SQL. Compared with SQL, however, HQL is fully object-oriented and "
-"understands notions like inheritance, polymorphism and association."
-msgstr ""
-"Hibernate 配备了一种非常强大的查询语言,这种语言看上去很像 SQL。但是不要被语"
-"法结构上的相似所迷惑,HQL 是非常有意识的被设计为完全面向对象的查询,它可以理"
-"解如继承、多态和关联之类的概念。 "
+msgid "Hibernate uses a powerful query language (HQL) that is similar in appearance to SQL. Compared with SQL, however, HQL is fully object-oriented and understands notions like inheritance, polymorphism and association."
+msgstr "Hibernate 配备了一种非常强大的查询语言,这种语言看上去很像 SQL。但是不要被语法结构上的相似所迷惑,HQL 是非常有意识的被设计为完全面向对象的查询,它可以理解如继承、多态和关联之类的概念。 "
#. Tag: title
#, no-c-format
@@ -37,29 +31,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"With the exception of names of Java classes and properties, queries are case-"
-"insensitive. So <literal>SeLeCT</literal> is the same as <literal>sELEct</"
-"literal> is the same as <literal>SELECT</literal>, but <literal>org."
-"hibernate.eg.FOO</literal> is not <literal>org.hibernate.eg.Foo</literal>, "
-"and <literal>foo.barSet</literal> is not <literal>foo.BARSET</literal>."
-msgstr ""
-"除了 Java 类与属性的名称外,查询语句对大小写并不敏感。 所以 <literal>SeLeCT</"
-"literal> 与 <literal>sELEct</literal> 以及 <literal>SELECT</literal> 是相同"
-"的,但是 <literal>org.hibernate.eg.FOO</literal> 并不等价于 <literal>org."
-"hibernate.eg.Foo</literal> 并且 <literal>foo.barSet</literal> 也不等价于 "
-"<literal>foo.BARSET</literal>。 "
+msgid "With the exception of names of Java classes and properties, queries are case-insensitive. So <literal>SeLeCT</literal> is the same as <literal>sELEct</literal> is the same as <literal>SELECT</literal>, but <literal>org.hibernate.eg.FOO</literal> is not <literal>org.hibernate.eg.Foo</literal>, and <literal>foo.barSet</literal> is not <literal>foo.BARSET</literal>."
+msgstr "除了 Java 类与属性的名称外,查询语句对大小写并不敏感。 所以 <literal>SeLeCT</literal> 与 <literal>sELEct</literal> 以及 <literal>SELECT</literal> 是相同的,但是 <literal>org.hibernate.eg.FOO</literal> 并不等价于 <literal>org.hibernate.eg.Foo</literal> 并且 <literal>foo.barSet</literal> 也不等价于 <literal>foo.BARSET</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This manual uses lowercase HQL keywords. Some users find queries with "
-"uppercase keywords more readable, but this convention is unsuitable for "
-"queries embedded in Java code."
-msgstr ""
-"本手册中的 HQL 关键字将使用小写字母。很多用户发现使用完全大写的关键字会使查询"
-"语句的可读性更强,但我们发现,当把查询语句嵌入到 Java 语句中的时候使用大写关"
-"键字比较难看。"
+msgid "This manual uses lowercase HQL keywords. Some users find queries with uppercase keywords more readable, but this convention is unsuitable for queries embedded in Java code."
+msgstr "本手册中的 HQL 关键字将使用小写字母。很多用户发现使用完全大写的关键字会使查询语句的可读性更强,但我们发现,当把查询语句嵌入到 Java 语句中的时候使用大写关键字比较难看。"
#. Tag: title
#, no-c-format
@@ -73,52 +51,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This returns all instances of the class <literal>eg.Cat</literal>. You do "
-"not usually need to qualify the class name, since <literal>auto-import</"
-"literal> is the default. For example:"
-msgstr ""
-"该子句简单的返回 <literal>eg.Cat</literal> 类的所有实例。通常我们不需要使用类"
-"的全限定名,因为 <literal>auto-import</literal>(自动引入)是缺省的情况。所"
-"以我们几乎只使用如下的简单写法: "
+msgid "This returns all instances of the class <literal>eg.Cat</literal>. You do not usually need to qualify the class name, since <literal>auto-import</literal> is the default. For example:"
+msgstr "该子句简单的返回 <literal>eg.Cat</literal> 类的所有实例。通常我们不需要使用类的全限定名,因为 <literal>auto-import</literal>(自动引入)是缺省的情况。所以我们几乎只使用如下的简单写法: "
#. Tag: para
#, no-c-format
-msgid ""
-"In order to refer to the <literal>Cat</literal> in other parts of the query, "
-"you will need to assign an <emphasis>alias</emphasis>. For example:"
-msgstr ""
-"为了在这个查询的其他部分里引用 <literal>Cat</literal>,你将需要分配一个"
-"<emphasis>别名</emphasis>。例如:"
+msgid "In order to refer to the <literal>Cat</literal> in other parts of the query, you will need to assign an <emphasis>alias</emphasis>. For example:"
+msgstr "为了在这个查询的其他部分里引用 <literal>Cat</literal>,你将需要分配一个<emphasis>别名</emphasis>。例如:"
#. Tag: para
#, no-c-format
-msgid ""
-"This query assigns the alias <literal>cat</literal> to <literal>Cat</"
-"literal> instances, so you can use that alias later in the query. The "
-"<literal>as</literal> keyword is optional. You could also write:"
-msgstr ""
-"这个语句把别名 <literal>cat</literal> 指定给类<literal>Cat</literal> 的实例,"
-"这样我们就可以在随后的查询中使用此别名了。关键字 <literal>as</literal> 是可选"
-"的,我们也可以这样写: "
+msgid "This query assigns the alias <literal>cat</literal> to <literal>Cat</literal> instances, so you can use that alias later in the query. The <literal>as</literal> keyword is optional. You could also write:"
+msgstr "这个语句把别名 <literal>cat</literal> 指定给类<literal>Cat</literal> 的实例,这样我们就可以在随后的查询中使用此别名了。关键字 <literal>as</literal> 是可选的,我们也可以这样写: "
#. Tag: para
#, no-c-format
-msgid ""
-"Multiple classes can appear, resulting in a cartesian product or \"cross\" "
-"join."
-msgstr ""
-"子句中可以同时出现多个类,其查询结果是产生一个笛卡儿积或产生跨表的连接。 "
+msgid "Multiple classes can appear, resulting in a cartesian product or \"cross\" join."
+msgstr "子句中可以同时出现多个类,其查询结果是产生一个笛卡儿积或产生跨表的连接。 "
#. Tag: para
#, no-c-format
-msgid ""
-"It is good practice to name query aliases using an initial lowercase as this "
-"is consistent with Java naming standards for local variables (e.g. "
-"<literal>domesticCat</literal>)."
-msgstr ""
-"查询语句中别名的开头部分小写被认为是实践中的好习惯,这样做与 Java 变量的命名"
-"标准保持了一致(比如,<literal>domesticCat</literal>)。 "
+msgid "It is good practice to name query aliases using an initial lowercase as this is consistent with Java naming standards for local variables (e.g. <literal>domesticCat</literal>)."
+msgstr "查询语句中别名的开头部分小写被认为是实践中的好习惯,这样做与 Java 变量的命名标准保持了一致(比如,<literal>domesticCat</literal>)。 "
#. Tag: title
#, no-c-format
@@ -127,12 +81,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also assign aliases to associated entities or to elements of a "
-"collection of values using a <literal>join</literal>. For example:"
-msgstr ""
-"我们也可以为相关联的实体甚至是对一个集合中的全部元素指定一个别名,这时要使用"
-"关键字 <literal>join</literal>。 "
+msgid "You can also assign aliases to associated entities or to elements of a collection of values using a <literal>join</literal>. For example:"
+msgstr "我们也可以为相关联的实体甚至是对一个集合中的全部元素指定一个别名,这时要使用关键字 <literal>join</literal>。 "
#. Tag: para
#, no-c-format
@@ -161,91 +111,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>inner join</literal>, <literal>left outer join</literal> and "
-"<literal>right outer join</literal> constructs may be abbreviated."
-msgstr ""
-"语句 <literal>inner join</literal>,<literal>left outer join</literal> 以及 "
-"<literal>right outer join</literal> 可以简写。"
+msgid "The <literal>inner join</literal>, <literal>left outer join</literal> and <literal>right outer join</literal> constructs may be abbreviated."
+msgstr "语句 <literal>inner join</literal>,<literal>left outer join</literal> 以及 <literal>right outer join</literal> 可以简写。"
#. Tag: para
#, no-c-format
-msgid ""
-"You may supply extra join conditions using the HQL <literal>with</literal> "
-"keyword."
-msgstr ""
-"通过 HQL 的 <literal>with</literal> 关键字,你可以提供额外的 join 条件。"
+msgid "You may supply extra join conditions using the HQL <literal>with</literal> keyword."
+msgstr "通过 HQL 的 <literal>with</literal> 关键字,你可以提供额外的 join 条件。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"A \"fetch\" join allows associations or collections of values to be "
-"initialized along with their parent objects using a single select. This is "
-"particularly useful in the case of a collection. It effectively overrides "
-"the outer join and lazy declarations of the mapping file for associations "
-"and collections. See <xref linkend=\"performance-fetching\" /> for more "
-"information."
-msgstr ""
-"还有,一个 \"fetch\" 连接允许仅仅使用一个选择语句就将相关联的对象或一组值的集"
-"合随着他们的父对象的初始化而被初始化,这种方法在使用到集合的情况下尤其有用,"
-"对于关联和集合来说,它有效的代替了映射文件中的外联接与延迟声明(lazy "
-"declarations)。查看 <xref linkend=\"performance-fetching\" /> 以获得等多的信"
-"息。"
+#, no-c-format
+msgid "A \"fetch\" join allows associations or collections of values to be initialized along with their parent objects using a single select. This is particularly useful in the case of a collection. It effectively overrides the outer join and lazy declarations of the mapping file for associations and collections. See <xref linkend=\"performance-fetching\" /> for more information."
+msgstr "还有,一个 \"fetch\" 连接允许仅仅使用一个选择语句就将相关联的对象或一组值的集合随着他们的父对象的初始化而被初始化,这种方法在使用到集合的情况下尤其有用,对于关联和集合来说,它有效的代替了映射文件中的外联接与延迟声明(lazy declarations)。查看 <xref linkend=\"performance-fetching\" /> 以获得等多的信息。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A fetch join does not usually need to assign an alias, because the "
-"associated objects should not be used in the <literal>where</literal> clause "
-"(or any other clause). The associated objects are also not returned directly "
-"in the query results. Instead, they may be accessed via the parent object. "
-"The only reason you might need an alias is if you are recursively join "
-"fetching a further collection:"
-msgstr ""
-"一个 fetch 连接通常不需要被指定别名,因为相关联的对象不应当被用在 "
-"<literal>where</literal> 子句(或其它任何子句)中。同时,相关联的对象并不在查"
-"询的结果中直接返回,但可以通过他们的父对象来访问到他们。 "
+msgid "A fetch join does not usually need to assign an alias, because the associated objects should not be used in the <literal>where</literal> clause (or any other clause). The associated objects are also not returned directly in the query results. Instead, they may be accessed via the parent object. The only reason you might need an alias is if you are recursively join fetching a further collection:"
+msgstr "一个 fetch 连接通常不需要被指定别名,因为相关联的对象不应当被用在 <literal>where</literal> 子句(或其它任何子句)中。同时,相关联的对象并不在查询的结果中直接返回,但可以通过他们的父对象来访问到他们。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>fetch</literal> construct cannot be used in queries called "
-"using <literal>iterate()</literal> (though <literal>scroll()</literal> can "
-"be used). <literal>Fetch</literal> should be used together with "
-"<literal>setMaxResults()</literal> or <literal>setFirstResult()</literal>, "
-"as these operations are based on the result rows which usually contain "
-"duplicates for eager collection fetching, hence, the number of rows is not "
-"what you would expect. <literal>Fetch</literal> should also not be used "
-"together with impromptu <literal>with</literal> condition. It is possible to "
-"create a cartesian product by join fetching more than one collection in a "
-"query, so take care in this case. Join fetching multiple collection roles "
-"can produce unexpected results for bag mappings, so user discretion is "
-"advised when formulating queries in this case. Finally, note that "
-"<literal>full join fetch</literal> and <literal>right join fetch</literal> "
-"are not meaningful."
-msgstr ""
-"假若使用 <literal>iterate()</literal> 来调用查询,请注意 <literal>fetch</"
-"literal> 构造是不能使用的(<literal>scroll()</literal> 可以使用)。"
-"<literal>fetch</literal> 也不应该与 <literal>setMaxResults()</literal> 或 "
-"<literal>setFirstResult()</literal> 共用,这是因为这些操作是基于结果集的,而"
-"在预先抓取集合类时可能包含重复的数据,也就是说无法预先知道精确的行数。"
-"<literal>fetch</literal> 还不能与独立的 <literal>with</literal> 条件一起使"
-"用。通过在一次查询中 fetch 多个集合,可以制造出笛卡尔积,因此请多加注意。对 "
-"bag 映射来说,同时 join fetch 多个集合角色可能在某些情况下给出并非预期的结"
-"果,也请小心。最后注意,使用 <literal>full join fetch</literal> 与 "
-"<literal>right join fetch</literal> 是没有意义的。 "
+msgid "The <literal>fetch</literal> construct cannot be used in queries called using <literal>iterate()</literal> (though <literal>scroll()</literal> can be used). <literal>Fetch</literal> should be used together with <literal>setMaxResults()</literal> or <literal>setFirstResult()</literal>, as these operations are based on the result rows which usually contain duplicates for eager collection fetching, hence, the number of rows is not what you would expect. <literal>Fetch</literal> should also not be used together with impromptu <literal>with</literal> condition. It is possible to create a cartesian product by join fetching more than one collection in a query, so take care in this case. Join fetching multiple collection roles can produce unexpected results for bag mappings, so user discretion is advised when formulating queries in this case. Finally, note that <literal>full join fetch</literal> and <literal>right join fetch</literal> are not meaningful."
+msgstr "假若使用 <literal>iterate()</literal> 来调用查询,请注意 <literal>fetch</literal> 构造是不能使用的(<literal>scroll()</literal> 可以使用)。<literal>fetch</literal> 也不应该与 <literal>setMaxResults()</literal> 或 <literal>setFirstResult()</literal> 共用,这是因为这些操作是基于结果集的,而在预先抓取集合类时可能包含重复的数据,也就是说无法预先知道精确的行数。<literal>fetch</literal> 还不能与独立的 <literal>with</literal> 条件一起使用。通过在一次查询中 fetch 多个集合,可以制造出笛卡尔积,因此请多加注意。对 bag 映射来说,同时 join fetch 多个集合角色可能在某些情况下给出并非预期的结果,也请小心。最后注意,使用 <literal>full join fetch</literal> 与 <literal>right join fetch</literal> 是没有意义的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you are using property-level lazy fetching (with bytecode "
-"instrumentation), it is possible to force Hibernate to fetch the lazy "
-"properties in the first query immediately using <literal>fetch all "
-"properties</literal>."
-msgstr ""
-"如果你使用属性级别的延迟获取(lazy fetching)(这是通过重新编写字节码实现"
-"的),可以使用 <literal>fetch all properties</literal> 来强制 Hibernate 立即"
-"取得那些原本需要延迟加载的属性(在第一个查询中)。 "
+msgid "If you are using property-level lazy fetching (with bytecode instrumentation), it is possible to force Hibernate to fetch the lazy properties in the first query immediately using <literal>fetch all properties</literal>."
+msgstr "如果你使用属性级别的延迟获取(lazy fetching)(这是通过重新编写字节码实现的),可以使用 <literal>fetch all properties</literal> 来强制 Hibernate 立即取得那些原本需要延迟加载的属性(在第一个查询中)。 "
#. Tag: title
#, no-c-format
@@ -254,35 +146,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"HQL supports two forms of association joining: <literal>implicit</literal> "
-"and <literal>explicit</literal>."
-msgstr ""
-"HQL 支持两种关联 join 的形式:<literal>implicit(隐式)</literal> 与 "
-"<literal>explicit(显式)</literal>。"
+msgid "HQL supports two forms of association joining: <literal>implicit</literal> and <literal>explicit</literal>."
+msgstr "HQL 支持两种关联 join 的形式:<literal>implicit(隐式)</literal> 与 <literal>explicit(显式)</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"The queries shown in the previous section all use the <literal>explicit</"
-"literal> form, that is, where the join keyword is explicitly used in the "
-"from clause. This is the recommended form."
-msgstr ""
-"上一节中给出的查询都是使用 <literal>explicit(显式)</literal>形式的,其中 "
-"form 子句中明确给出了 join 关键字。这是建议使用的方式。 "
+msgid "The queries shown in the previous section all use the <literal>explicit</literal> form, that is, where the join keyword is explicitly used in the from clause. This is the recommended form."
+msgstr "上一节中给出的查询都是使用 <literal>explicit(显式)</literal>形式的,其中 form 子句中明确给出了 join 关键字。这是建议使用的方式。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>implicit</literal> form does not use the join keyword. Instead, "
-"the associations are \"dereferenced\" using dot-notation. <literal>implicit</"
-"literal> joins can appear in any of the HQL clauses. <literal>implicit</"
-"literal> join result in inner joins in the resulting SQL statement."
-msgstr ""
-"<literal>implicit(隐式)</literal>形式不使用 join 关键字。关联使用\"点号\"来"
-"进行“引用”。<literal>implicit</literal> join 可以在任何 HQL 子句中出现。"
-"<literal>implicit</literal> join 在最终的 SQL 语句中以 inner join 的方式出"
-"现。"
+msgid "The <literal>implicit</literal> form does not use the join keyword. Instead, the associations are \"dereferenced\" using dot-notation. <literal>implicit</literal> joins can appear in any of the HQL clauses. <literal>implicit</literal> join result in inner joins in the resulting SQL statement."
+msgstr "<literal>implicit(隐式)</literal>形式不使用 join 关键字。关联使用\"点号\"来进行“引用”。<literal>implicit</literal> join 可以在任何 HQL 子句中出现。<literal>implicit</literal> join 在最终的 SQL 语句中以 inner join 的方式出现。"
#. Tag: title
#, no-c-format
@@ -296,48 +171,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The special property (lowercase) <literal>id</literal> may be used to "
-"reference the identifier property of an entity <emphasis>provided that the "
-"entity does not define a non-identifier property named id</emphasis>."
-msgstr ""
-"特殊属性(lowercase)<literal>id</literal> 可以用来引用实体的 identifier 属"
-"性 <emphasis>假设这个实体没有定义用 non-identifier 属性命名的 id</"
-"emphasis>。 "
+msgid "The special property (lowercase) <literal>id</literal> may be used to reference the identifier property of an entity <emphasis>provided that the entity does not define a non-identifier property named id</emphasis>."
+msgstr "特殊属性(lowercase)<literal>id</literal> 可以用来引用实体的 identifier 属性 <emphasis>假设这个实体没有定义用 non-identifier 属性命名的 id</emphasis>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If the entity defines a named identifier property, you can use that property "
-"name."
+msgid "If the entity defines a named identifier property, you can use that property name."
msgstr "如果这个实体定义了 identifier 属性,你可以使用属性名。 "
#. Tag: para
#, no-c-format
-msgid ""
-"References to composite identifier properties follow the same naming rules. "
-"If the entity has a non-identifier property named id, the composite "
-"identifier property can only be referenced by its defined named. Otherwise, "
-"the special <literal>id</literal> property can be used to reference the "
-"identifier property."
-msgstr ""
-"对组合 identifier 属性的引用遵循相同的命名规则。如果实体有一个 non-"
-"identifier 属性命名的 id,这个组合 identifier 属性只能用自己定义的名字来引"
-"用;否则,特殊 <literal>id</literal> 属性可以用来引用 identifier 属性。 "
+msgid "References to composite identifier properties follow the same naming rules. If the entity has a non-identifier property named id, the composite identifier property can only be referenced by its defined named. Otherwise, the special <literal>id</literal> property can be used to reference the identifier property."
+msgstr "对组合 identifier 属性的引用遵循相同的命名规则。如果实体有一个 non-identifier 属性命名的 id,这个组合 identifier 属性只能用自己定义的名字来引用;否则,特殊 <literal>id</literal> 属性可以用来引用 identifier 属性。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Please note that, starting in version 3.2.2, this has changed significantly. "
-"In previous versions, <literal>id</literal> <emphasis>always</emphasis> "
-"referred to the identifier property regardless of its actual name. A "
-"ramification of that decision was that non-identifier properties named "
-"<literal>id</literal> could never be referenced in Hibernate queries."
-msgstr ""
-"注意:从 3.2.2 版本开始,这已经改变了很多。在前面的版本里,不管实际的名字,"
-"<literal>id</literal> <emphasis>总是</emphasis>指向 identifier 属性;而用 "
-"non-identifier 属性命名的 <literal>id</literal> 就从来不在 Hibernate 查询里引"
-"用。 "
+msgid "Please note that, starting in version 3.2.2, this has changed significantly. In previous versions, <literal>id</literal> <emphasis>always</emphasis> referred to the identifier property regardless of its actual name. A ramification of that decision was that non-identifier properties named <literal>id</literal> could never be referenced in Hibernate queries."
+msgstr "注意:从 3.2.2 版本开始,这已经改变了很多。在前面的版本里,不管实际的名字,<literal>id</literal> <emphasis>总是</emphasis>指向 identifier 属性;而用 non-identifier 属性命名的 <literal>id</literal> 就从来不在 Hibernate 查询里引用。 "
#. Tag: title
#, no-c-format
@@ -346,39 +196,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>select</literal> clause picks which objects and properties to "
-"return in the query result set. Consider the following:"
-msgstr ""
-"<literal>select</literal> 子句选择将哪些对象与属性返回到查询结果集中。考虑如"
-"下情况:"
+msgid "The <literal>select</literal> clause picks which objects and properties to return in the query result set. Consider the following:"
+msgstr "<literal>select</literal> 子句选择将哪些对象与属性返回到查询结果集中。考虑如下情况:"
#. Tag: para
#, no-c-format
-msgid ""
-"The query will select <literal>mate</literal>s of other <literal>Cat</"
-"literal>s. You can express this query more compactly as:"
-msgstr ""
-"该语句将选择其它 <literal>Cat</literal> 的 <literal>mate</literal>(其他猫的"
-"配偶)。实际上,你可以更简洁的用以下的查询语句表达相同的含义: "
+msgid "The query will select <literal>mate</literal>s of other <literal>Cat</literal>s. You can express this query more compactly as:"
+msgstr "该语句将选择其它 <literal>Cat</literal> 的 <literal>mate</literal>(其他猫的配偶)。实际上,你可以更简洁的用以下的查询语句表达相同的含义: "
#. Tag: para
#, no-c-format
-msgid ""
-"Queries can return properties of any value type including properties of "
-"component type:"
-msgstr ""
-"查询语句可以返回值为任何类型的属性,包括返回类型为某种组件(Component)的属"
-"性: "
+msgid "Queries can return properties of any value type including properties of component type:"
+msgstr "查询语句可以返回值为任何类型的属性,包括返回类型为某种组件(Component)的属性: "
#. Tag: para
#, no-c-format
-msgid ""
-"Queries can return multiple objects and/or properties as an array of type "
-"<literal>Object[]</literal>:"
-msgstr ""
-"查询语句可以返回多个对象和(或)属性,存放在 <literal>Object[]</literal> 队列"
-"中, "
+msgid "Queries can return multiple objects and/or properties as an array of type <literal>Object[]</literal>:"
+msgstr "查询语句可以返回多个对象和(或)属性,存放在 <literal>Object[]</literal> 队列中, "
#. Tag: para
#, no-c-format
@@ -387,33 +221,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Or - assuming that the class <literal>Family</literal> has an appropriate "
-"constructor - as an actual typesafe Java object:"
-msgstr ""
-"假设类 <literal>Family</literal> 有一个合适的构造函数 - 作为实际的类型安全的 "
-"Java 对象:"
+msgid "Or - assuming that the class <literal>Family</literal> has an appropriate constructor - as an actual typesafe Java object:"
+msgstr "假设类 <literal>Family</literal> 有一个合适的构造函数 - 作为实际的类型安全的 Java 对象:"
#. Tag: para
#, no-c-format
-msgid ""
-"You can assign aliases to selected expressions using <literal>as</literal>:"
+msgid "You can assign aliases to selected expressions using <literal>as</literal>:"
msgstr "你可以使用关键字 <literal>as</literal> 给“被选择了的表达式”指派别名: "
#. Tag: para
#, no-c-format
-msgid ""
-"This is most useful when used together with <literal>select new map</"
-"literal>:"
+msgid "This is most useful when used together with <literal>select new map</literal>:"
msgstr "这种做法在与子句 <literal>select new map</literal> 一起使用时最有用:"
#. Tag: para
#, no-c-format
-msgid ""
-"This query returns a <literal>Map</literal> from aliases to selected values."
-msgstr ""
-"该查询返回了一个 <literal>Map</literal> 的对象,内容是别名与被选择的值组成的"
-"名-值映射。"
+msgid "This query returns a <literal>Map</literal> from aliases to selected values."
+msgstr "该查询返回了一个 <literal>Map</literal> 的对象,内容是别名与被选择的值组成的名-值映射。"
#. Tag: title
#, no-c-format
@@ -422,8 +246,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"HQL queries can even return the results of aggregate functions on properties:"
+msgid "HQL queries can even return the results of aggregate functions on properties:"
msgstr "HQL 查询甚至可以返回作用于属性之上的聚集函数的计算结果: "
#. Tag: para
@@ -432,9 +255,9 @@
msgstr "受支持的聚集函数如下: "
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>avg(...), sum(...), min(...), max(...)</literal>"
-msgstr "<literal>avg(...), sum(...), min(...), max(...)</literal>"
+msgstr "<literal>avg(...), sum(...), min(...), max(...)</literal> "
#. Tag: para
#, no-c-format
@@ -442,25 +265,19 @@
msgstr "<literal>count(*)</literal>"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>count(...), count(distinct ...), count(all...)</literal>"
msgstr "<literal>count(...), count(distinct ...), count(all...)</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"You can use arithmetic operators, concatenation, and recognized SQL "
-"functions in the select clause:"
+msgid "You can use arithmetic operators, concatenation, and recognized SQL functions in the select clause:"
msgstr "你可以在选择子句中使用数学操作符、连接以及经过验证的 SQL 函数: "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>distinct</literal> and <literal>all</literal> keywords can be "
-"used and have the same semantics as in SQL."
-msgstr ""
-"关键字 <literal>distinct</literal> 与 <literal>all</literal> 也可以使用,它们"
-"具有与 SQL 相同的语义。 "
+msgid "The <literal>distinct</literal> and <literal>all</literal> keywords can be used and have the same semantics as in SQL."
+msgstr "关键字 <literal>distinct</literal> 与 <literal>all</literal> 也可以使用,它们具有与 SQL 相同的语义。 "
#. Tag: title
#, no-c-format
@@ -474,38 +291,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"returns instances not only of <literal>Cat</literal>, but also of subclasses "
-"like <literal>DomesticCat</literal>. Hibernate queries can name "
-"<emphasis>any</emphasis> Java class or interface in the <literal>from</"
-"literal> clause. The query will return instances of all persistent classes "
-"that extend that class or implement the interface. The following query would "
-"return all persistent objects:"
-msgstr ""
-"不仅返回 <literal>Cat</literal> 类的实例,也同时返回子类 "
-"<literal>DomesticCat</literal> 的实例。Hibernate 可以在 <literal>from</"
-"literal> 子句中指定<emphasis>任何</emphasis> Java 类或接口。查询会返回继承了"
-"该类的所有持久化子类的实例或返回声明了该接口的所有持久化类的实例。下面的查询"
-"语句返回所有的被持久化的对象: "
+msgid "returns instances not only of <literal>Cat</literal>, but also of subclasses like <literal>DomesticCat</literal>. Hibernate queries can name <emphasis>any</emphasis> Java class or interface in the <literal>from</literal> clause. The query will return instances of all persistent classes that extend that class or implement the interface. The following query would return all persistent objects:"
+msgstr "不仅返回 <literal>Cat</literal> 类的实例,也同时返回子类 <literal>DomesticCat</literal> 的实例。Hibernate 可以在 <literal>from</literal> 子句中指定<emphasis>任何</emphasis> Java 类或接口。查询会返回继承了该类的所有持久化子类的实例或返回声明了该接口的所有持久化类的实例。下面的查询语句返回所有的被持久化的对象: "
#. Tag: para
#, no-c-format
-msgid ""
-"The interface <literal>Named</literal> might be implemented by various "
-"persistent classes:"
+msgid "The interface <literal>Named</literal> might be implemented by various persistent classes:"
msgstr "接口 <literal>Named</literal> 可能被各种各样的持久化类声明:"
#. Tag: para
#, no-c-format
-msgid ""
-"These last two queries will require more than one SQL <literal>SELECT</"
-"literal>. This means that the <literal>order by</literal> clause does not "
-"correctly order the whole result set. It also means you cannot call these "
-"queries using <literal>Query.scroll()</literal>."
-msgstr ""
-"注意,最后的两个查询将需要超过一个的 SQL <literal>SELECT</literal>。这表明 "
-"<literal>order by</literal> 子句没有对整个结果集进行正确的排序。(这也说明你"
-"不能对这样的查询使用 <literal>Query.scroll()</literal> 方法。)"
+msgid "These last two queries will require more than one SQL <literal>SELECT</literal>. This means that the <literal>order by</literal> clause does not correctly order the whole result set. It also means you cannot call these queries using <literal>Query.scroll()</literal>."
+msgstr "注意,最后的两个查询将需要超过一个的 SQL <literal>SELECT</literal>。这表明 <literal>order by</literal> 子句没有对整个结果集进行正确的排序。(这也说明你不能对这样的查询使用 <literal>Query.scroll()</literal> 方法。)"
#. Tag: title
#, no-c-format
@@ -514,12 +311,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>where</literal> clause allows you to refine the list of "
-"instances returned. If no alias exists, you can refer to properties by name:"
-msgstr ""
-"<literal>where</literal> 子句允许你将返回的实例列表的范围缩小。如果没有指定别"
-"名,你可以使用属性名来直接引用属性: "
+msgid "The <literal>where</literal> clause allows you to refine the list of instances returned. If no alias exists, you can refer to properties by name:"
+msgstr "<literal>where</literal> 子句允许你将返回的实例列表的范围缩小。如果没有指定别名,你可以使用属性名来直接引用属性: "
#. Tag: para
#, no-c-format
@@ -529,8 +322,7 @@
#. Tag: para
#, no-c-format
msgid "This returns instances of <literal>Cat</literal> named 'Fritz'."
-msgstr ""
-"返回名为(属性 name 等于)'Fritz' 的 <literal>Cat</literal> 类的实例。 "
+msgstr "返回名为(属性 name 等于)'Fritz' 的 <literal>Cat</literal> 类的实例。 "
#. Tag: para
#, no-c-format
@@ -539,25 +331,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"returns all instances of <literal>Foo</literal> with an instance of "
-"<literal>bar</literal> with a <literal>date</literal> property equal to the "
-"<literal>startDate</literal> property of the <literal>Foo</literal>. "
-"Compound path expressions make the <literal>where</literal> clause extremely "
-"powerful. Consider the following:"
-msgstr ""
-"将返回所有满足下面条件的 <literal>Foo</literal> 类的实例: 存在如下的 "
-"<literal>bar</literal> 的一个实例,其 <literal>date</literal> 属性等于 "
-"<literal>Foo</literal> 的 <literal>startDate</literal> 属性。复合路径表达式使"
-"得 <literal>where</literal> 子句非常的强大,考虑如下情况: "
+msgid "returns all instances of <literal>Foo</literal> with an instance of <literal>bar</literal> with a <literal>date</literal> property equal to the <literal>startDate</literal> property of the <literal>Foo</literal>. Compound path expressions make the <literal>where</literal> clause extremely powerful. Consider the following:"
+msgstr "将返回所有满足下面条件的 <literal>Foo</literal> 类的实例: 存在如下的 <literal>bar</literal> 的一个实例,其 <literal>date</literal> 属性等于 <literal>Foo</literal> 的 <literal>startDate</literal> 属性。复合路径表达式使得 <literal>where</literal> 子句非常的强大,考虑如下情况: "
#. Tag: para
#, no-c-format
-msgid ""
-"This query translates to an SQL query with a table (inner) join. For example:"
-msgstr ""
-"该查询将被翻译成为一个含有表连接(内连接)的 SQL 查询。如果你打算写像这样的查"
-"询语句:"
+msgid "This query translates to an SQL query with a table (inner) join. For example:"
+msgstr "该查询将被翻译成为一个含有表连接(内连接)的 SQL 查询。如果你打算写像这样的查询语句:"
#. Tag: para
#, no-c-format
@@ -566,21 +346,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>=</literal> operator can be used to compare not only "
-"properties, but also instances:"
-msgstr ""
-"<literal>=</literal> 运算符不仅可以被用来比较属性的值,也可以用来比较实例: "
+msgid "The <literal>=</literal> operator can be used to compare not only properties, but also instances:"
+msgstr "<literal>=</literal> 运算符不仅可以被用来比较属性的值,也可以用来比较实例: "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The special property (lowercase) <literal>id</literal> can be used to "
-"reference the unique identifier of an object. See <xref linkend=\"queryhql-"
-"identifier-property\" /> for more information."
-msgstr ""
-"特殊属性(小写)<literal>id</literal> 可以用来表示一个对象的唯一的标识符。详"
-"情请参考 <xref linkend=\"queryhql-identifier-property\" />。"
+#, no-c-format
+msgid "The special property (lowercase) <literal>id</literal> can be used to reference the unique identifier of an object. See <xref linkend=\"queryhql-identifier-property\" /> for more information."
+msgstr "特殊属性(小写)<literal>id</literal> 可以用来表示一个对象的唯一的标识符。详情请参考 <xref linkend=\"queryhql-identifier-property\" />。 "
#. Tag: para
#, no-c-format
@@ -589,14 +361,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Properties of composite identifiers can also be used. Consider the following "
-"example where <literal>Person</literal> has composite identifiers consisting "
-"of <literal>country</literal> and <literal>medicareNumber</literal>:"
-msgstr ""
-"同样也可以使用复合标识符。比如 <literal>Person</literal> 类有一个复合标识符,"
-"它由 <literal>country</literal> 属性与 <literal>medicareNumber</literal> 属性"
-"组成:"
+msgid "Properties of composite identifiers can also be used. Consider the following example where <literal>Person</literal> has composite identifiers consisting of <literal>country</literal> and <literal>medicareNumber</literal>:"
+msgstr "同样也可以使用复合标识符。比如 <literal>Person</literal> 类有一个复合标识符,它由 <literal>country</literal> 属性与 <literal>medicareNumber</literal> 属性组成:"
#. Tag: para
#, no-c-format
@@ -604,57 +370,29 @@
msgstr "第二个查询也不需要进行表连接。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"See <xref linkend=\"queryhql-identifier-property\" /> for more information "
-"regarding referencing identifier properties)"
-msgstr ""
-"关于引用标识符属性的更多信息,请参考 <xref linkend=\"queryhql-identifier-"
-"property\" />。"
+#, no-c-format
+msgid "See <xref linkend=\"queryhql-identifier-property\" /> for more information regarding referencing identifier properties)"
+msgstr "关于引用标识符属性的更多信息,请参考 <xref linkend=\"queryhql-identifier-property\" />。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The special property <literal>class</literal> accesses the discriminator "
-"value of an instance in the case of polymorphic persistence. A Java class "
-"name embedded in the where clause will be translated to its discriminator "
-"value."
-msgstr ""
-"同样的,特殊属性 <literal>class</literal> 在进行多态持久化的情况下被用来存取"
-"一个实例的鉴别值(discriminator value)。一个嵌入到 where 子句中的 Java 类的"
-"名字将被转换为该类的鉴别值。 "
+msgid "The special property <literal>class</literal> accesses the discriminator value of an instance in the case of polymorphic persistence. A Java class name embedded in the where clause will be translated to its discriminator value."
+msgstr "同样的,特殊属性 <literal>class</literal> 在进行多态持久化的情况下被用来存取一个实例的鉴别值(discriminator value)。一个嵌入到 where 子句中的 Java 类的名字将被转换为该类的鉴别值。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"You can also use components or composite user types, or properties of said "
-"component types. See <xref linkend=\"queryhql-components\" /> for more "
-"information."
-msgstr ""
-"你也可以使用组件或者复合用户类型,以及组件类型的属性。详情请参考 <xref "
-"linkend=\"queryhql-components\" />。"
+#, no-c-format
+msgid "You can also use components or composite user types, or properties of said component types. See <xref linkend=\"queryhql-components\" /> for more information."
+msgstr "你也可以使用组件或者复合用户类型,以及组件类型的属性。详情请参考 <xref linkend=\"queryhql-components\" />。 "
#. Tag: para
#, no-c-format
-msgid ""
-"An \"any\" type has the special properties <literal>id</literal> and "
-"<literal>class</literal> that allows you to express a join in the following "
-"way (where <literal>AuditLog.item</literal> is a property mapped with "
-"<literal><any></literal>):"
-msgstr ""
-"一个“任意”类型有两个特殊的属性 <literal>id</literal> 和 <literal>class</"
-"literal>,来允许我们按照下面的方式表达一个连接(<literal>AuditLog.item</"
-"literal> 是一个属性,该属性被映射为 <literal><any></literal>)。 "
+msgid "An \"any\" type has the special properties <literal>id</literal> and <literal>class</literal> that allows you to express a join in the following way (where <literal>AuditLog.item</literal> is a property mapped with <literal><any></literal>):"
+msgstr "一个“任意”类型有两个特殊的属性 <literal>id</literal> 和 <literal>class</literal>,来允许我们按照下面的方式表达一个连接(<literal>AuditLog.item</literal> 是一个属性,该属性被映射为 <literal><any></literal>)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>log.item.class</literal> and <literal>payment.class</literal> "
-"would refer to the values of completely different database columns in the "
-"above query."
-msgstr ""
-"注意,在上面的查询与句中,<literal>log.item.class</literal> 和 "
-"<literal>payment.class</literal> 将涉及到完全不同的数据库中的列。"
+msgid "The <literal>log.item.class</literal> and <literal>payment.class</literal> would refer to the values of completely different database columns in the above query."
+msgstr "注意,在上面的查询与句中,<literal>log.item.class</literal> 和 <literal>payment.class</literal> 将涉及到完全不同的数据库中的列。"
#. Tag: title
#, no-c-format
@@ -663,12 +401,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Expressions used in the <literal>where</literal> clause include the "
-"following:"
-msgstr ""
-"在 <literal>where</literal> 子句中允许使用的表达式包括 大多数你可以在 SQL 使"
-"用的表达式种类: "
+msgid "Expressions used in the <literal>where</literal> clause include the following:"
+msgstr "在 <literal>where</literal> 子句中允许使用的表达式包括 大多数你可以在 SQL 使用的表达式种类: "
#. Tag: para
#, no-c-format
@@ -677,11 +411,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"binary comparison operators: <literal>=, >=, <=, <>, !=, like</"
-"literal>"
-msgstr ""
-"二进制比较运算符 <literal>=, >=, <=, <>, !=, like</literal> "
+msgid "binary comparison operators: <literal>=, >=, <=, <>, !=, like</literal>"
+msgstr "二进制比较运算符 <literal>=, >=, <=, <>, !=, like</literal> "
#. Tag: para
#, no-c-format
@@ -695,64 +426,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>in</literal>, <literal>not in</literal>, <literal>between</"
-"literal>, <literal>is null</literal>, <literal>is not null</literal>, "
-"<literal>is empty</literal>, <literal>is not empty</literal>, "
-"<literal>member of</literal> and <literal>not member of</literal>"
-msgstr ""
-"<literal>in</literal>, <literal>not in</literal>, <literal>between</"
-"literal>, <literal>is null</literal>, <literal>is not null</literal>, "
-"<literal>is empty</literal>, <literal>is not empty</literal>, "
-"<literal>member of</literal> and <literal>not member of</literal>"
+msgid "<literal>in</literal>, <literal>not in</literal>, <literal>between</literal>, <literal>is null</literal>, <literal>is not null</literal>, <literal>is empty</literal>, <literal>is not empty</literal>, <literal>member of</literal> and <literal>not member of</literal>"
+msgstr "<literal>in</literal>, <literal>not in</literal>, <literal>between</literal>, <literal>is null</literal>, <literal>is not null</literal>, <literal>is empty</literal>, <literal>is not empty</literal>, <literal>member of</literal> and <literal>not member of</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"\"Simple\" case, <literal>case ... when ... then ... else ... end</literal>, "
-"and \"searched\" case, <literal>case when ... then ... else ... end</literal>"
-msgstr ""
-"\"Simple\" case, <literal>case ... when ... then ... else ... end</literal>, "
-"and \"searched\" case, <literal>case when ... then ... else ... end</literal>"
+msgid "\"Simple\" case, <literal>case ... when ... then ... else ... end</literal>, and \"searched\" case, <literal>case when ... then ... else ... end</literal>"
+msgstr "\"Simple\" case, <literal>case ... when ... then ... else ... end</literal>, and \"searched\" case, <literal>case when ... then ... else ... end</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"string concatenation <literal>...||...</literal> or <literal>concat(...,...)"
-"</literal>"
-msgstr ""
-"字符串连接符 <literal>...||...</literal> or <literal>concat(...,...)</"
-"literal>"
+msgid "string concatenation <literal>...||...</literal> or <literal>concat(...,...)</literal>"
+msgstr "字符串连接符 <literal>...||...</literal> or <literal>concat(...,...)</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>current_date()</literal>, <literal>current_time()</literal>, and "
-"<literal>current_timestamp()</literal>"
-msgstr ""
-"<literal>current_date()</literal>, <literal>current_time()</literal>, and "
-"<literal>current_timestamp()</literal>"
+msgid "<literal>current_date()</literal>, <literal>current_time()</literal>, and <literal>current_timestamp()</literal>"
+msgstr "<literal>current_date()</literal>, <literal>current_time()</literal>, and <literal>current_timestamp()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>second(...)</literal>, <literal>minute(...)</literal>, <literal>hour"
-"(...)</literal>, <literal>day(...)</literal>, <literal>month(...)</literal>, "
-"and <literal>year(...)</literal>"
-msgstr ""
-"<literal>second(...)</literal>、<literal>minute(...)</literal>、<literal>hour"
-"(...)</literal>、<literal>day(...)</literal>、<literal>month(...)</literal> "
-"和 <literal>year(...)</literal>"
+msgid "<literal>second(...)</literal>, <literal>minute(...)</literal>, <literal>hour(...)</literal>, <literal>day(...)</literal>, <literal>month(...)</literal>, and <literal>year(...)</literal>"
+msgstr "<literal>second(...)</literal>、<literal>minute(...)</literal>、<literal>hour(...)</literal>、<literal>day(...)</literal>、<literal>month(...)</literal> 和 <literal>year(...)</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"Any function or operator defined by EJB-QL 3.0: <literal>substring(), trim"
-"(), lower(), upper(), length(), locate(), abs(), sqrt(), bit_length(), mod()"
-"</literal>"
-msgstr ""
-"EJB-QL 3.0 定义的任何功能或操作符:<literal>substring(), trim(), lower(), "
-"upper(), length(), locate(), abs(), sqrt(), bit_length(), mod()</literal>"
+msgid "Any function or operator defined by EJB-QL 3.0: <literal>substring(), trim(), lower(), upper(), length(), locate(), abs(), sqrt(), bit_length(), mod()</literal>"
+msgstr "EJB-QL 3.0 定义的任何功能或操作符:<literal>substring(), trim(), lower(), upper(), length(), locate(), abs(), sqrt(), bit_length(), mod()</literal>"
#. Tag: para
#, no-c-format
@@ -761,53 +461,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>str()</literal> for converting numeric or temporal values to a "
-"readable string"
+msgid "<literal>str()</literal> for converting numeric or temporal values to a readable string"
msgstr "<literal>str()</literal> 把数字或者时间值转换为可读的字符串"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>cast(... as ...)</literal>, where the second argument is the name "
-"of a Hibernate type, and <literal>extract(... from ...)</literal> if ANSI "
-"<literal>cast()</literal> and <literal>extract()</literal> is supported by "
-"the underlying database"
-msgstr ""
-"<literal>cast(... as ...)</literal>,其第二个参数是某 Hibernate 类型的名字,"
-"以及 <literal>extract(... from ...)</literal>,只要 ANSI <literal>cast()</"
-"literal> 和 <literal>extract()</literal> 被底层数据库支持"
+msgid "<literal>cast(... as ...)</literal>, where the second argument is the name of a Hibernate type, and <literal>extract(... from ...)</literal> if ANSI <literal>cast()</literal> and <literal>extract()</literal> is supported by the underlying database"
+msgstr "<literal>cast(... as ...)</literal>,其第二个参数是某 Hibernate 类型的名字,以及 <literal>extract(... from ...)</literal>,只要 ANSI <literal>cast()</literal> 和 <literal>extract()</literal> 被底层数据库支持"
#. Tag: para
#, no-c-format
-msgid ""
-"the HQL <literal>index()</literal> function, that applies to aliases of a "
-"joined indexed collection"
+msgid "the HQL <literal>index()</literal> function, that applies to aliases of a joined indexed collection"
msgstr "HQL <literal>index()</literal> 函数,作用于 join 的有序集合的别名。"
#. Tag: para
#, no-c-format
-msgid ""
-"HQL functions that take collection-valued path expressions: <literal>size(), "
-"minelement(), maxelement(), minindex(), maxindex()</literal>, along with the "
-"special <literal>elements()</literal> and <literal>indices</literal> "
-"functions that can be quantified using <literal>some, all, exists, any, in</"
-"literal>."
-msgstr ""
-"HQL 函数,把集合作为参数:<literal>size(), minelement(), maxelement(), "
-"minindex(), maxindex()</literal>,还有特别的 <literal>elements()</literal> "
-"和 <literal>indices</literal> 函数,可以与数量词加以限定:<literal>some, "
-"all, exists, any, in</literal>。 "
+msgid "HQL functions that take collection-valued path expressions: <literal>size(), minelement(), maxelement(), minindex(), maxindex()</literal>, along with the special <literal>elements()</literal> and <literal>indices</literal> functions that can be quantified using <literal>some, all, exists, any, in</literal>."
+msgstr "HQL 函数,把集合作为参数:<literal>size(), minelement(), maxelement(), minindex(), maxindex()</literal>,还有特别的 <literal>elements()</literal> 和 <literal>indices</literal> 函数,可以与数量词加以限定:<literal>some, all, exists, any, in</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Any database-supported SQL scalar function like <literal>sign()</literal>, "
-"<literal>trunc()</literal>, <literal>rtrim()</literal>, and <literal>sin()</"
-"literal>"
-msgstr ""
-"任何数据库支持的 SQL 标量函数,比如 <literal>sign()</literal>, <literal>trunc"
-"()</literal>, <literal>rtrim()</literal>, <literal>sin()</literal> "
+msgid "Any database-supported SQL scalar function like <literal>sign()</literal>, <literal>trunc()</literal>, <literal>rtrim()</literal>, and <literal>sin()</literal>"
+msgstr "任何数据库支持的 SQL 标量函数,比如 <literal>sign()</literal>, <literal>trunc()</literal>, <literal>rtrim()</literal>, <literal>sin()</literal> "
#. Tag: para
#, no-c-format
@@ -816,38 +491,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"named parameters <literal>:name</literal>, <literal>:start_date</literal>, "
-"and <literal>:x1</literal>"
-msgstr ""
-"命名参数 <literal>:name</literal>,<literal>:start_date</literal>,<literal>:"
-"x1</literal> "
+msgid "named parameters <literal>:name</literal>, <literal>:start_date</literal>, and <literal>:x1</literal>"
+msgstr "命名参数 <literal>:name</literal>,<literal>:start_date</literal>,<literal>:x1</literal> "
#. Tag: para
#, no-c-format
-msgid ""
-"SQL literals <literal>'foo'</literal>, <literal>69</literal>, <literal>6.66E"
-"+2</literal>, <literal>'1970-01-01 10:00:01.0'</literal>"
-msgstr ""
-"SQL 直接常量 <literal>'foo'</literal>, <literal>69</literal>, <literal>6.66E"
-"+2</literal>, <literal>'1970-01-01 10:00:01.0'</literal>"
+msgid "SQL literals <literal>'foo'</literal>, <literal>69</literal>, <literal>6.66E+2</literal>, <literal>'1970-01-01 10:00:01.0'</literal>"
+msgstr "SQL 直接常量 <literal>'foo'</literal>, <literal>69</literal>, <literal>6.66E+2</literal>, <literal>'1970-01-01 10:00:01.0'</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"Java <literal>public static final</literal> constants <literal>eg.Color."
-"TABBY</literal>"
-msgstr ""
-"Java <literal>public static final</literal> 类型的常量 <literal>eg.Color."
-"TABBY</literal>"
+msgid "Java <literal>public static final</literal> constants <literal>eg.Color.TABBY</literal>"
+msgstr "Java <literal>public static final</literal> 类型的常量 <literal>eg.Color.TABBY</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>in</literal> and <literal>between</literal> can be used as follows:"
-msgstr ""
-"关键字 <literal>in</literal> 与 <literal>between</literal> 可按如下方法使"
-"用: "
+msgid "<literal>in</literal> and <literal>between</literal> can be used as follows:"
+msgstr "关键字 <literal>in</literal> 与 <literal>between</literal> 可按如下方法使用: "
#. Tag: para
#, no-c-format
@@ -856,106 +516,53 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Similarly, <literal>is null</literal> and <literal>is not null</literal> can "
-"be used to test for null values."
-msgstr ""
-"同样,子句 <literal>is null</literal> 与 <literal>is not null</literal> 可以"
-"被用来测试空值(null)。 "
+msgid "Similarly, <literal>is null</literal> and <literal>is not null</literal> can be used to test for null values."
+msgstr "同样,子句 <literal>is null</literal> 与 <literal>is not null</literal> 可以被用来测试空值(null)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Booleans can be easily used in expressions by declaring HQL query "
-"substitutions in Hibernate configuration:"
-msgstr ""
-"在 Hibernate 配置文件中声明 HQL“查询替代(query substitutions)”之后,布尔表"
-"达式(Booleans)可以在其他表达式中轻松的使用: "
+msgid "Booleans can be easily used in expressions by declaring HQL query substitutions in Hibernate configuration:"
+msgstr "在 Hibernate 配置文件中声明 HQL“查询替代(query substitutions)”之后,布尔表达式(Booleans)可以在其他表达式中轻松的使用: "
#. Tag: para
#, no-c-format
-msgid ""
-"This will replace the keywords <literal>true</literal> and <literal>false</"
-"literal> with the literals <literal>1</literal> and <literal>0</literal> in "
-"the translated SQL from this HQL:"
-msgstr ""
-"系统将该 HQL 转换为 SQL 语句时,该设置表明将用字符 <literal>1</literal> 和 "
-"<literal>0</literal> 来取代关键字 <literal>true</literal> 和 <literal>false</"
-"literal>:"
+msgid "This will replace the keywords <literal>true</literal> and <literal>false</literal> with the literals <literal>1</literal> and <literal>0</literal> in the translated SQL from this HQL:"
+msgstr "系统将该 HQL 转换为 SQL 语句时,该设置表明将用字符 <literal>1</literal> 和 <literal>0</literal> 来取代关键字 <literal>true</literal> 和 <literal>false</literal>:"
#. Tag: para
#, no-c-format
-msgid ""
-"You can test the size of a collection with the special property "
-"<literal>size</literal> or the special <literal>size()</literal> function."
-msgstr ""
-"你可以用特殊属性 <literal>size</literal>,或是特殊函数 <literal>size()</"
-"literal> 测试一个集合的大小。 "
+msgid "You can test the size of a collection with the special property <literal>size</literal> or the special <literal>size()</literal> function."
+msgstr "你可以用特殊属性 <literal>size</literal>,或是特殊函数 <literal>size()</literal> 测试一个集合的大小。 "
#. Tag: para
#, no-c-format
-msgid ""
-"For indexed collections, you can refer to the minimum and maximum indices "
-"using <literal>minindex</literal> and <literal>maxindex</literal> functions. "
-"Similarly, you can refer to the minimum and maximum elements of a collection "
-"of basic type using the <literal>minelement</literal> and "
-"<literal>maxelement</literal> functions. For example:"
-msgstr ""
-"对于索引了(有序)的集合,你可以使用 <literal>minindex</literal> 与 "
-"<literal>maxindex</literal> 函数来引用到最小与最大的索引序数。同理,你可以使"
-"用 <literal>minelement</literal> 与 <literal>maxelement</literal> 函数来引用"
-"到一个基本数据类型的集合中最小与最大的元素。例如:"
+msgid "For indexed collections, you can refer to the minimum and maximum indices using <literal>minindex</literal> and <literal>maxindex</literal> functions. Similarly, you can refer to the minimum and maximum elements of a collection of basic type using the <literal>minelement</literal> and <literal>maxelement</literal> functions. For example:"
+msgstr "对于索引了(有序)的集合,你可以使用 <literal>minindex</literal> 与 <literal>maxindex</literal> 函数来引用到最小与最大的索引序数。同理,你可以使用 <literal>minelement</literal> 与 <literal>maxelement</literal> 函数来引用到一个基本数据类型的集合中最小与最大的元素。例如:"
#. Tag: para
#, no-c-format
-msgid ""
-"The SQL functions <literal>any, some, all, exists, in</literal> are "
-"supported when passed the element or index set of a collection "
-"(<literal>elements</literal> and <literal>indices</literal> functions) or "
-"the result of a subquery (see below):"
-msgstr ""
-"在传递一个集合的索引集或者是元素集(<literal>elements</literal> 与 "
-"<literal>indices</literal> 函数)或者传递一个子查询的结果的时候,可以使用 "
-"SQL 函数 <literal>any, some,all, exists, in</literal>:"
+msgid "The SQL functions <literal>any, some, all, exists, in</literal> are supported when passed the element or index set of a collection (<literal>elements</literal> and <literal>indices</literal> functions) or the result of a subquery (see below):"
+msgstr "在传递一个集合的索引集或者是元素集(<literal>elements</literal> 与 <literal>indices</literal> 函数)或者传递一个子查询的结果的时候,可以使用 SQL 函数 <literal>any, some,all, exists, in</literal>:"
#. Tag: para
#, no-c-format
-msgid ""
-"Note that these constructs - <literal>size</literal>, <literal>elements</"
-"literal>, <literal>indices</literal>, <literal>minindex</literal>, "
-"<literal>maxindex</literal>, <literal>minelement</literal>, "
-"<literal>maxelement</literal> - can only be used in the where clause in "
-"Hibernate3."
-msgstr ""
-"注意,在 Hibernate3 中,这些结构变量 — <literal>size</literal>,"
-"<literal>elements</literal>,<literal>indices</literal>,<literal>minindex</"
-"literal>,<literal>maxindex</literal>,<literal>minelement</literal>,"
-"<literal>maxelement</literal> — 只能在 where 子句中使用。 "
+msgid "Note that these constructs - <literal>size</literal>, <literal>elements</literal>, <literal>indices</literal>, <literal>minindex</literal>, <literal>maxindex</literal>, <literal>minelement</literal>, <literal>maxelement</literal> - can only be used in the where clause in Hibernate3."
+msgstr "注意,在 Hibernate3 中,这些结构变量 — <literal>size</literal>,<literal>elements</literal>,<literal>indices</literal>,<literal>minindex</literal>,<literal>maxindex</literal>,<literal>minelement</literal>,<literal>maxelement</literal> — 只能在 where 子句中使用。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Elements of indexed collections (arrays, lists, and maps) can be referred to "
-"by index in a where clause only:"
-msgstr ""
-"一个被索引过的(有序的)集合的元素(arrays,lists,maps)可以在其他索引中被引"
-"用(只能在 where 子句中): "
+msgid "Elements of indexed collections (arrays, lists, and maps) can be referred to by index in a where clause only:"
+msgstr "一个被索引过的(有序的)集合的元素(arrays,lists,maps)可以在其他索引中被引用(只能在 where 子句中): "
#. Tag: para
#, no-c-format
-msgid ""
-"The expression inside <literal>[]</literal> can even be an arithmetic "
-"expression:"
+msgid "The expression inside <literal>[]</literal> can even be an arithmetic expression:"
msgstr "在 <literal>[]</literal> 中的表达式甚至可以是一个算数表达式:"
#. Tag: para
#, no-c-format
-msgid ""
-"HQL also provides the built-in <literal>index()</literal> function for "
-"elements of a one-to-many association or collection of values."
-msgstr ""
-"对于一个一对多的关联(one-to-many association)或是值的集合中的元素,HQL 也提"
-"供内建的 <literal>index()</literal> 函数。"
+msgid "HQL also provides the built-in <literal>index()</literal> function for elements of a one-to-many association or collection of values."
+msgstr "对于一个一对多的关联(one-to-many association)或是值的集合中的元素,HQL 也提供内建的 <literal>index()</literal> 函数。"
#. Tag: para
#, no-c-format
@@ -964,12 +571,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Consider how much longer and less readable the following query would be in "
-"SQL:"
-msgstr ""
-"如果你还不能对所有的这些深信不疑,想想下面的查询。如果使用 SQL,语句长度会增"
-"长多少,可读性会下降多少: "
+msgid "Consider how much longer and less readable the following query would be in SQL:"
+msgstr "如果你还不能对所有的这些深信不疑,想想下面的查询。如果使用 SQL,语句长度会增长多少,可读性会下降多少: "
#. Tag: para
#, no-c-format
@@ -983,21 +586,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The list returned by a query can be ordered by any property of a returned "
-"class or components:"
-msgstr ""
-"查询返回的列表(list)可以按照一个返回的类或组件(components)中的任何属性"
-"(property)进行排序: "
+msgid "The list returned by a query can be ordered by any property of a returned class or components:"
+msgstr "查询返回的列表(list)可以按照一个返回的类或组件(components)中的任何属性(property)进行排序: "
#. Tag: para
#, no-c-format
-msgid ""
-"The optional <literal>asc</literal> or <literal>desc</literal> indicate "
-"ascending or descending order respectively."
-msgstr ""
-"可选的 <literal>asc</literal> 或 <literal>desc</literal> 关键字指明了按照升序"
-"或降序进行排序。"
+msgid "The optional <literal>asc</literal> or <literal>desc</literal> indicate ascending or descending order respectively."
+msgstr "可选的 <literal>asc</literal> 或 <literal>desc</literal> 关键字指明了按照升序或降序进行排序。"
#. Tag: title
#, no-c-format
@@ -1006,12 +601,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A query that returns aggregate values can be grouped by any property of a "
-"returned class or components:"
-msgstr ""
-"一个返回聚集值(aggregate values)的查询可以按照一个返回的类或组件"
-"(components)中的任何属性(property)进行分组: "
+msgid "A query that returns aggregate values can be grouped by any property of a returned class or components:"
+msgstr "一个返回聚集值(aggregate values)的查询可以按照一个返回的类或组件(components)中的任何属性(property)进行分组: "
#. Tag: para
#, no-c-format
@@ -1020,29 +611,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"SQL functions and aggregate functions are allowed in the <literal>having</"
-"literal> and <literal>order by</literal> clauses if they are supported by "
-"the underlying database (i.e., not in MySQL)."
-msgstr ""
-"如果底层的数据库支持的话(例如不能在 MySQL 中使用),SQL 的一般函数与聚集函数"
-"也可以出现在 <literal>having</literal> 与 <literal>order by</literal> 子句"
-"中。 "
+msgid "SQL functions and aggregate functions are allowed in the <literal>having</literal> and <literal>order by</literal> clauses if they are supported by the underlying database (i.e., not in MySQL)."
+msgstr "如果底层的数据库支持的话(例如不能在 MySQL 中使用),SQL 的一般函数与聚集函数也可以出现在 <literal>having</literal> 与 <literal>order by</literal> 子句中。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Neither the <literal>group by</literal> clause nor the <literal>order by</"
-"literal> clause can contain arithmetic expressions. Hibernate also does not "
-"currently expand a grouped entity, so you cannot write <literal>group by "
-"cat</literal> if all properties of <literal>cat</literal> are non-"
-"aggregated. You have to list all non-aggregated properties explicitly."
-msgstr ""
-"注意 <literal>group by</literal> 子句与 <literal>order by</literal> 子句中都"
-"不能包含算术表达式(arithmetic expressions)。也要注意 Hibernate 目前不会扩"
-"展 group 的实体,因此你不能写 <literal>group by cat</literal>,除非 "
-"<literal>cat</literal> 的所有属性都不是聚集的(non-aggregated)。你必须明确的"
-"列出所有的非聚集属性。 "
+msgid "Neither the <literal>group by</literal> clause nor the <literal>order by</literal> clause can contain arithmetic expressions. Hibernate also does not currently expand a grouped entity, so you cannot write <literal>group by cat</literal> if all properties of <literal>cat</literal> are non-aggregated. You have to list all non-aggregated properties explicitly."
+msgstr "注意 <literal>group by</literal> 子句与 <literal>order by</literal> 子句中都不能包含算术表达式(arithmetic expressions)。也要注意 Hibernate 目前不会扩展 group 的实体,因此你不能写 <literal>group by cat</literal>,除非 <literal>cat</literal> 的所有属性都不是聚集的(non-aggregated)。你必须明确的列出所有的非聚集属性。 "
#. Tag: title
#, no-c-format
@@ -1051,15 +626,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"For databases that support subselects, Hibernate supports subqueries within "
-"queries. A subquery must be surrounded by parentheses (often by an SQL "
-"aggregate function call). Even correlated subqueries (subqueries that refer "
-"to an alias in the outer query) are allowed."
-msgstr ""
-"对于支持子查询的数据库,Hibernate 支持在查询中使用子查询。一个子查询必须被圆"
-"括号包围起来(经常是 SQL 聚集函数的圆括号)。甚至相互关联的子查询(引用到外部"
-"查询中的别名的子查询)也是允许的。"
+msgid "For databases that support subselects, Hibernate supports subqueries within queries. A subquery must be surrounded by parentheses (often by an SQL aggregate function call). Even correlated subqueries (subqueries that refer to an alias in the outer query) are allowed."
+msgstr "对于支持子查询的数据库,Hibernate 支持在查询中使用子查询。一个子查询必须被圆括号包围起来(经常是 SQL 聚集函数的圆括号)。甚至相互关联的子查询(引用到外部查询中的别名的子查询)也是允许的。"
#. Tag: para
#, no-c-format
@@ -1067,14 +635,9 @@
msgstr "注意,HQL 自查询只可以在 select 或者 where 子句中出现。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Note that subqueries can also utilize <literal>row value constructor</"
-"literal> syntax. See <xref linkend=\"queryhql-tuple\" /> for more "
-"information."
-msgstr ""
-"注意子查询也可以利用 <literal>row value constructor</literal> 语法。请参考 "
-"<xref linkend=\"queryhql-tuple\" /> 来获得详情。 "
+#, no-c-format
+msgid "Note that subqueries can also utilize <literal>row value constructor</literal> syntax. See <xref linkend=\"queryhql-tuple\" /> for more information."
+msgstr "注意子查询也可以利用 <literal>row value constructor</literal> 语法。请参考 <xref linkend=\"queryhql-tuple\" /> 来获得详情。"
#. Tag: title
#, no-c-format
@@ -1083,90 +646,37 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate queries can be quite powerful and complex. In fact, the power of "
-"the query language is one of Hibernate's main strengths. The following "
-"example queries are similar to queries that have been used on recent "
-"projects. Please note that most queries you will write will be much simpler "
-"than the following examples."
-msgstr ""
-"Hibernate 查询可以非常的强大与复杂。实际上,Hibernate 的一个主要卖点就是查询"
-"语句的威力。这里有一些例子,它们与我在最近的一个项目中使用的查询非常相似。注"
-"意你能用到的大多数查询比这些要简单的多。"
+msgid "Hibernate queries can be quite powerful and complex. In fact, the power of the query language is one of Hibernate's main strengths. The following example queries are similar to queries that have been used on recent projects. Please note that most queries you will write will be much simpler than the following examples."
+msgstr "Hibernate 查询可以非常的强大与复杂。实际上,Hibernate 的一个主要卖点就是查询语句的威力。这里有一些例子,它们与我在最近的一个项目中使用的查询非常相似。注意你能用到的大多数查询比这些要简单的多。"
#. Tag: para
#, no-c-format
-msgid ""
-"The following query returns the order id, number of items, the given minimum "
-"total value and the total value of the order for all unpaid orders for a "
-"particular customer. The results are ordered by total value. In determining "
-"the prices, it uses the current catalog. The resulting SQL query, against "
-"the <literal>ORDER</literal>, <literal>ORDER_LINE</literal>, "
-"<literal>PRODUCT</literal>, <literal>CATALOG</literal> and <literal>PRICE</"
-"literal> tables has four inner joins and an (uncorrelated) subselect."
-msgstr ""
-"下面的查询对于某个特定的客户的所有未支付的账单,在给定给最小总价值的情况下,"
-"返回订单的 id,条目的数量和总价值,返回值按照总价值的结果进行排序。为了决定价"
-"格,查询使用了当前目录。作为转换结果的 SQL 查询,使用了<literal>ORDER</"
-"literal>,<literal>ORDER_LINE</literal>,<literal>PRODUCT</literal>,"
-"<literal>CATALOG</literal> 和 <literal>PRICE</literal> 库表。"
+msgid "The following query returns the order id, number of items, the given minimum total value and the total value of the order for all unpaid orders for a particular customer. The results are ordered by total value. In determining the prices, it uses the current catalog. The resulting SQL query, against the <literal>ORDER</literal>, <literal>ORDER_LINE</literal>, <literal>PRODUCT</literal>, <literal>CATALOG</literal> and <literal>PRICE</literal> tables has four inner joins and an (uncorrelated) subselect."
+msgstr "下面的查询对于某个特定的客户的所有未支付的账单,在给定给最小总价值的情况下,返回订单的 id,条目的数量和总价值,返回值按照总价值的结果进行排序。为了决定价格,查询使用了当前目录。作为转换结果的 SQL 查询,使用了<literal>ORDER</literal>,<literal>ORDER_LINE</literal>,<literal>PRODUCT</literal>,<literal>CATALOG</literal> 和 <literal>PRICE</literal> 库表。"
#. Tag: para
#, no-c-format
-msgid ""
-"What a monster! Actually, in real life, I'm not very keen on subqueries, so "
-"my query was really more like this:"
-msgstr ""
-"这简直是一个怪物!实际上,在现实生活中,我并不热衷于子查询,所以我的查询语句"
-"看起来更像这个:"
+msgid "What a monster! Actually, in real life, I'm not very keen on subqueries, so my query was really more like this:"
+msgstr "这简直是一个怪物!实际上,在现实生活中,我并不热衷于子查询,所以我的查询语句看起来更像这个:"
#. Tag: para
#, no-c-format
-msgid ""
-"The next query counts the number of payments in each status, excluding all "
-"payments in the <literal>AWAITING_APPROVAL</literal> status where the most "
-"recent status change was made by the current user. It translates to an SQL "
-"query with two inner joins and a correlated subselect against the "
-"<literal>PAYMENT</literal>, <literal>PAYMENT_STATUS</literal> and "
-"<literal>PAYMENT_STATUS_CHANGE</literal> tables."
-msgstr ""
-"下面一个查询计算每一种状态下的支付的数目,除去所有处于 "
-"<literal>AWAITING_APPROVAL</literal> 状态的支付,因为在该状态下 当前的用户作"
-"出了状态的最新改变。该查询被转换成含有两个内连接以及一个相关联的子选择的 SQL "
-"查询,该查询使用了表 <literal>PAYMENT</literal>,<literal>PAYMENT_STATUS</"
-"literal> 以及 <literal>PAYMENT_STATUS_CHANGE</literal>。"
+msgid "The next query counts the number of payments in each status, excluding all payments in the <literal>AWAITING_APPROVAL</literal> status where the most recent status change was made by the current user. It translates to an SQL query with two inner joins and a correlated subselect against the <literal>PAYMENT</literal>, <literal>PAYMENT_STATUS</literal> and <literal>PAYMENT_STATUS_CHANGE</literal> tables."
+msgstr "下面一个查询计算每一种状态下的支付的数目,除去所有处于 <literal>AWAITING_APPROVAL</literal> 状态的支付,因为在该状态下 当前的用户作出了状态的最新改变。该查询被转换成含有两个内连接以及一个相关联的子选择的 SQL 查询,该查询使用了表 <literal>PAYMENT</literal>,<literal>PAYMENT_STATUS</literal> 以及 <literal>PAYMENT_STATUS_CHANGE</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"If the <literal>statusChanges</literal> collection was mapped as a list, "
-"instead of a set, the query would have been much simpler to write."
-msgstr ""
-"如果我把 <literal>statusChanges</literal> 实例集映射为一个列表(list)而不是"
-"一个集合(set),书写查询语句将更加简单。 "
+msgid "If the <literal>statusChanges</literal> collection was mapped as a list, instead of a set, the query would have been much simpler to write."
+msgstr "如果我把 <literal>statusChanges</literal> 实例集映射为一个列表(list)而不是一个集合(set),书写查询语句将更加简单。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The next query uses the MS SQL Server <literal>isNull()</literal> function "
-"to return all the accounts and unpaid payments for the organization to which "
-"the current user belongs. It translates to an SQL query with three inner "
-"joins, an outer join and a subselect against the <literal>ACCOUNT</literal>, "
-"<literal>PAYMENT</literal>, <literal>PAYMENT_STATUS</literal>, "
-"<literal>ACCOUNT_TYPE</literal>, <literal>ORGANIZATION</literal> and "
-"<literal>ORG_USER</literal> tables."
-msgstr ""
-"下面一个查询使用了 MS SQL Server 的 <literal>isNull()</literal> 函数用以返回"
-"当前用户所属组织的组织帐号及组织未支付的账。它被转换成一个对表 "
-"<literal>ACCOUNT</literal>,<literal>PAYMENT</literal>,"
-"<literal>PAYMENT_STATUS</literal>,<literal>ACCOUNT_TYPE</literal>,"
-"<literal>ORGANIZATION</literal> 以及 <literal>ORG_USER</literal> 进行的三个"
-"内连接,一个外连接和一个子选择的 SQL 查询。"
+msgid "The next query uses the MS SQL Server <literal>isNull()</literal> function to return all the accounts and unpaid payments for the organization to which the current user belongs. It translates to an SQL query with three inner joins, an outer join and a subselect against the <literal>ACCOUNT</literal>, <literal>PAYMENT</literal>, <literal>PAYMENT_STATUS</literal>, <literal>ACCOUNT_TYPE</literal>, <literal>ORGANIZATION</literal> and <literal>ORG_USER</literal> tables."
+msgstr "下面一个查询使用了 MS SQL Server 的 <literal>isNull()</literal> 函数用以返回当前用户所属组织的组织帐号及组织未支付的账。它被转换成一个对表 <literal>ACCOUNT</literal>,<literal>PAYMENT</literal>,<literal>PAYMENT_STATUS</literal>,<literal>ACCOUNT_TYPE</literal>,<literal>ORGANIZATION</literal> 以及 <literal>ORG_USER</literal> 进行的三个内连接,一个外连接和一个子选择的 SQL 查询。"
#. Tag: para
#, no-c-format
-msgid ""
-"For some databases, we would need to do away with the (correlated) subselect."
+msgid "For some databases, we would need to do away with the (correlated) subselect."
msgstr "对于一些数据库,我们需要弃用(相关的)子选择。"
#. Tag: title
@@ -1175,15 +685,9 @@
msgstr "批量的 UPDATE 和 DELETE"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"HQL now supports <literal>update</literal>, <literal>delete</literal> and "
-"<literal>insert ... select ...</literal> statements. See <xref linkend="
-"\"batch-direct\" /> for more information."
-msgstr ""
-"HQL 现在支持 <literal>update</literal>,<literal>delete</literal> 和 "
-"<literal>insert ... select ...</literal>语句。查阅 <xref linkend=\"batch-"
-"direct\" /> 以获得更多信息。"
+#, no-c-format
+msgid "HQL now supports <literal>update</literal>, <literal>delete</literal> and <literal>insert ... select ...</literal> statements. See <xref linkend=\"batch-direct\" /> for more information."
+msgstr "HQL 现在支持 <literal>update</literal>,<literal>delete</literal> 和 <literal>insert ... select ...</literal>语句。查阅 <xref linkend=\"batch-direct\" /> 以获得更多信息。 "
#. Tag: title
#, no-c-format
@@ -1202,12 +706,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If your database supports subselects, you can place a condition upon "
-"selection size in the where clause of your query:"
-msgstr ""
-"如果你的数据库支持子选择,你可以在你的查询的 where 子句中为选择的大小"
-"(selection size)指定一个条件:"
+msgid "If your database supports subselects, you can place a condition upon selection size in the where clause of your query:"
+msgstr "如果你的数据库支持子选择,你可以在你的查询的 where 子句中为选择的大小(selection size)指定一个条件:"
#. Tag: para
#, no-c-format
@@ -1216,12 +716,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"As this solution cannot return a <literal>User</literal> with zero messages "
-"because of the inner join, the following form is also useful:"
-msgstr ""
-"因为内连接(inner join)的原因,这个解决方案不能返回含有零个信息的 "
-"<literal>User</literal> 类的实例,所以这种情况下使用下面的格式将是有帮助的: "
+msgid "As this solution cannot return a <literal>User</literal> with zero messages because of the inner join, the following form is also useful:"
+msgstr "因为内连接(inner join)的原因,这个解决方案不能返回含有零个信息的 <literal>User</literal> 类的实例,所以这种情况下使用下面的格式将是有帮助的: "
#. Tag: para
#, no-c-format
@@ -1230,18 +726,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Collections are pageable by using the <literal>Query</literal> interface "
-"with a filter:"
-msgstr ""
-"通过将接口 <literal>Query</literal> 与一个过滤器(filter)一起使用,集合"
-"(Collections)是可以分页的:"
+msgid "Collections are pageable by using the <literal>Query</literal> interface with a filter:"
+msgstr "通过将接口 <literal>Query</literal> 与一个过滤器(filter)一起使用,集合(Collections)是可以分页的:"
#. Tag: para
#, no-c-format
msgid "Collection elements can be ordered or grouped using a query filter:"
-msgstr ""
-"通过使用查询过滤器(query filter)可以将集合(Collection)的元素分组或排序: "
+msgstr "通过使用查询过滤器(query filter)可以将集合(Collection)的元素分组或排序: "
#. Tag: para
#, no-c-format
@@ -1255,22 +746,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Components can be used similarly to the simple value types that are used in "
-"HQL queries. They can appear in the <literal>select</literal> clause as "
-"follows:"
-msgstr ""
-"在 HQL 查询里,组件可以和简单值类型一样使用。它们可以出现在 <literal>select</"
-"literal> 子句里: "
+msgid "Components can be used similarly to the simple value types that are used in HQL queries. They can appear in the <literal>select</literal> clause as follows:"
+msgstr "在 HQL 查询里,组件可以和简单值类型一样使用。它们可以出现在 <literal>select</literal> 子句里: "
#. Tag: para
#, no-c-format
-msgid ""
-"where the Person's name property is a component. Components can also be used "
-"in the <literal>where</literal> clause:"
-msgstr ""
-"在这里,Person 的 name 属性是一个组件。组件也可以用在 <literal>where</"
-"literal> 子句里:"
+msgid "where the Person's name property is a component. Components can also be used in the <literal>where</literal> clause:"
+msgstr "在这里,Person 的 name 属性是一个组件。组件也可以用在 <literal>where</literal> 子句里:"
#. Tag: para
#, no-c-format
@@ -1278,13 +760,9 @@
msgstr "组件也可以用在 <literal>order by</literal> 子句里:"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Another common use of components is in <link linkend=\"queryhql-tuple\">row "
-"value constructors</link>."
-msgstr ""
-"组件的另外一个常见用法是在 <xref linkend=\"queryhql-tuple\" /> 行值(row "
-"value)构造函数里。"
+#, no-c-format
+msgid "Another common use of components is in <link linkend=\"queryhql-tuple\">row value constructors</link>."
+msgstr "组件的另外一个常见用法是在 <xref linkend=\"queryhql-tuple\" /> 行值(row value)构造函数里。 "
#. Tag: title
#, no-c-format
@@ -1293,52 +771,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"HQL supports the use of ANSI SQL <literal>row value constructor</literal> "
-"syntax, sometimes referred to AS <literal>tuple</literal> syntax, even "
-"though the underlying database may not support that notion. Here, we are "
-"generally referring to multi-valued comparisons, typically associated with "
-"components. Consider an entity Person which defines a name component:"
-msgstr ""
-"HQL 支持 ANSI SQL <literal>row value constructor</literal> 语法(有时也叫作 "
-"<literal>tuple</literal> 语法),即使底层数据库可能不支持这个概念。在这里我们"
-"通常指的是多值(multi-valued)的比较,典型地是和组件相关联。来看看一个定义了 "
-"name 组件的实体 Person:"
+msgid "HQL supports the use of ANSI SQL <literal>row value constructor</literal> syntax, sometimes referred to AS <literal>tuple</literal> syntax, even though the underlying database may not support that notion. Here, we are generally referring to multi-valued comparisons, typically associated with components. Consider an entity Person which defines a name component:"
+msgstr "HQL 支持 ANSI SQL <literal>row value constructor</literal> 语法(有时也叫作 <literal>tuple</literal> 语法),即使底层数据库可能不支持这个概念。在这里我们通常指的是多值(multi-valued)的比较,典型地是和组件相关联。来看看一个定义了 name 组件的实体 Person:"
#. Tag: para
#, no-c-format
-msgid ""
-"That is valid syntax although it is a little verbose. You can make this more "
-"concise by using <literal>row value constructor</literal> syntax:"
-msgstr ""
-"那是有效的语法,虽然有点冗长。我们可以使它更加简洁一点,并使用 <literal>row "
-"value constructor</literal> 语法: "
+msgid "That is valid syntax although it is a little verbose. You can make this more concise by using <literal>row value constructor</literal> syntax:"
+msgstr "那是有效的语法,虽然有点冗长。我们可以使它更加简洁一点,并使用 <literal>row value constructor</literal> 语法: "
#. Tag: para
#, no-c-format
-msgid ""
-"It can also be useful to specify this in the <literal>select</literal> "
-"clause:"
+msgid "It can also be useful to specify this in the <literal>select</literal> clause:"
msgstr "在 <literal>select</literal> 子句里指定这个也是很有用的:"
#. Tag: para
#, no-c-format
-msgid ""
-"Using <literal>row value constructor</literal> syntax can also be beneficial "
-"when using subqueries that need to compare against multiple values:"
-msgstr ""
-"当使用需要比较多个值的子查询时,采用 <literal>row value constructor</"
-"literal> 语法也很有用处: "
+msgid "Using <literal>row value constructor</literal> syntax can also be beneficial when using subqueries that need to compare against multiple values:"
+msgstr "当使用需要比较多个值的子查询时,采用 <literal>row value constructor</literal> 语法也很有用处: "
#. Tag: para
#, no-c-format
-msgid ""
-"One thing to consider when deciding if you want to use this syntax, is that "
-"the query will be dependent upon the ordering of the component sub-"
-"properties in the metadata."
-msgstr ""
-"决定是否使用这个语法的一件因素就是:这个查询将依赖于元数据里的组件子属性"
-"(sub-properties)的顺序。 "
+msgid "One thing to consider when deciding if you want to use this syntax, is that the query will be dependent upon the ordering of the component sub-properties in the metadata."
+msgstr "决定是否使用这个语法的一件因素就是:这个查询将依赖于元数据里的组件子属性(sub-properties)的顺序。 "
#, fuzzy
#~ msgid "<![CDATA[from eg.Cat]]>"
@@ -1381,10 +835,8 @@
#, fuzzy
#~ msgid "<![CDATA[from Formula form full join form.parameter param]]>"
#~ msgstr "from Formula form full join form.parameter param"
-
#~ msgid "inner join"
#~ msgstr "<literal>inner join</literal>(内连接)"
-
#~ msgid "left outer join"
#~ msgstr "<literal>left outer join</literal>(左外连接)"
@@ -1539,7 +991,6 @@
#~ msgstr ""
#~ "select avg(cat.weight), sum(cat.weight), max(cat.weight), count(cat)\n"
#~ "from Cat cat"
-
#~ msgid "count(*)"
#~ msgstr "count(*)"
@@ -2279,3 +1730,4 @@
#~ "where not ( cat.name, cat.color ) in (\n"
#~ " select cat.name, cat.color from DomesticCat cat\n"
#~ ")"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_sql.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_sql.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/query_sql.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-03-12T00:03:47\n"
-"PO-Revision-Date: 2010-01-11 10:42+1000\n"
+"PO-Revision-Date: 2010-03-16 10:16+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,26 +21,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also express queries in the native SQL dialect of your database. "
-"This is useful if you want to utilize database-specific features such as "
-"query hints or the <literal>CONNECT</literal> keyword in Oracle. It also "
-"provides a clean migration path from a direct SQL/JDBC based application to "
-"Hibernate."
-msgstr ""
-"你也可以使用你的数据库的 Native SQL 语言来查询数据。这对你在要使用数据库的某"
-"些特性的时候(比如说在查询提示或者 Oracle 中的 <literal>CONNECT</literal> 关"
-"键字),这是非常有用的。这就能够扫清你把原来直接使用 SQL/JDBC 的程序迁移到基"
-"于 Hibernate 应用的道路上的障碍。 "
+msgid "You can also express queries in the native SQL dialect of your database. This is useful if you want to utilize database-specific features such as query hints or the <literal>CONNECT</literal> keyword in Oracle. It also provides a clean migration path from a direct SQL/JDBC based application to Hibernate."
+msgstr "你也可以使用你的数据库的 Native SQL 语言来查询数据。这对你在要使用数据库的某些特性的时候(比如说在查询提示或者 Oracle 中的 <literal>CONNECT</literal> 关键字),这是非常有用的。这就能够扫清你把原来直接使用 SQL/JDBC 的程序迁移到基于 Hibernate 应用的道路上的障碍。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate3 allows you to specify handwritten SQL, including stored "
-"procedures, for all create, update, delete, and load operations."
-msgstr ""
-"Hibernate3 允许你使用手写的 sql 来完成所有的 create、update、delete 和 load "
-"操作(包括存储过程) "
+msgid "Hibernate3 allows you to specify handwritten SQL, including stored procedures, for all create, update, delete, and load operations."
+msgstr "Hibernate3 允许你使用手写的 sql 来完成所有的 create、update、delete 和 load 操作(包括存储过程) "
#. Tag: title
#, no-c-format
@@ -49,15 +36,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Execution of native SQL queries is controlled via the <literal>SQLQuery</"
-"literal> interface, which is obtained by calling <literal>Session."
-"createSQLQuery()</literal>. The following sections describe how to use this "
-"API for querying."
-msgstr ""
-"对原生 SQL 查询执行的控制是通过 <literal>SQLQuery</literal> 接口进行的,通过"
-"执行<literal>Session.createSQLQuery()</literal>获取这个接口。下面来描述如何使"
-"用这个 API 进行查询。 "
+msgid "Execution of native SQL queries is controlled via the <literal>SQLQuery</literal> interface, which is obtained by calling <literal>Session.createSQLQuery()</literal>. The following sections describe how to use this API for querying."
+msgstr "对原生 SQL 查询执行的控制是通过 <literal>SQLQuery</literal> 接口进行的,通过执行<literal>Session.createSQLQuery()</literal>获取这个接口。下面来描述如何使用这个 API 进行查询。 "
#. Tag: title
#, no-c-format
@@ -71,24 +51,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"These will return a List of Object arrays (Object[]) with scalar values for "
-"each column in the CATS table. Hibernate will use ResultSetMetadata to "
-"deduce the actual order and types of the returned scalar values."
-msgstr ""
-"它们都将返回一个 Object 数组(Object[])组成的 List,数组每个元素都是 CATS 表"
-"的一个字段值。Hibernate 会使用 ResultSetMetadata 来判定返回的标量值的实际顺序"
-"和类型。 "
+msgid "These will return a List of Object arrays (Object[]) with scalar values for each column in the CATS table. Hibernate will use ResultSetMetadata to deduce the actual order and types of the returned scalar values."
+msgstr "它们都将返回一个 Object 数组(Object[])组成的 List,数组每个元素都是 CATS 表的一个字段值。Hibernate 会使用 ResultSetMetadata 来判定返回的标量值的实际顺序和类型。 "
#. Tag: para
#, no-c-format
-msgid ""
-"To avoid the overhead of using <literal>ResultSetMetadata</literal>, or "
-"simply to be more explicit in what is returned, one can use "
-"<literal>addScalar()</literal>:"
-msgstr ""
-"如果要避免过多的使用 <literal>ResultSetMetadata</literal>,或者只是为了更加明"
-"确的指名返回值,可以使用 <literal>addScalar()</literal>: "
+msgid "To avoid the overhead of using <literal>ResultSetMetadata</literal>, or simply to be more explicit in what is returned, one can use <literal>addScalar()</literal>:"
+msgstr "如果要避免过多的使用 <literal>ResultSetMetadata</literal>,或者只是为了更加明确的指名返回值,可以使用 <literal>addScalar()</literal>: "
#. Tag: para
#, no-c-format
@@ -107,48 +76,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This will return Object arrays, but now it will not use "
-"<literal>ResultSetMetadata</literal> but will instead explicitly get the ID, "
-"NAME and BIRTHDATE column as respectively a Long, String and a Short from "
-"the underlying resultset. This also means that only these three columns will "
-"be returned, even though the query is using <literal>*</literal> and could "
-"return more than the three listed columns."
-msgstr ""
-"它仍然会返回 Object 数组,但是此时不再使用 <literal>ResultSetMetdata</"
-"literal>,而是明确的将 ID,NAME 和 BIRTHDATE 按照 Long,String 和 Short 类型"
-"从 resultset 中取出。同时,也指明了就算 query 是使用 <literal>*</literal> 来"
-"查询的,可能获得超过列出的这三个字段,也仅仅会返回这三个字段。 "
+msgid "This will return Object arrays, but now it will not use <literal>ResultSetMetadata</literal> but will instead explicitly get the ID, NAME and BIRTHDATE column as respectively a Long, String and a Short from the underlying resultset. This also means that only these three columns will be returned, even though the query is using <literal>*</literal> and could return more than the three listed columns."
+msgstr "它仍然会返回 Object 数组,但是此时不再使用 <literal>ResultSetMetdata</literal>,而是明确的将 ID,NAME 和 BIRTHDATE 按照 Long,String 和 Short 类型从 resultset 中取出。同时,也指明了就算 query 是使用 <literal>*</literal> 来查询的,可能获得超过列出的这三个字段,也仅仅会返回这三个字段。 "
#. Tag: para
#, no-c-format
-msgid ""
-"It is possible to leave out the type information for all or some of the "
-"scalars."
+msgid "It is possible to leave out the type information for all or some of the scalars."
msgstr "对全部或者部分的标量值不设置类型信息也是可以的。"
#. Tag: para
#, no-c-format
-msgid ""
-"This is essentially the same query as before, but now "
-"<literal>ResultSetMetaData</literal> is used to determine the type of NAME "
-"and BIRTHDATE, where as the type of ID is explicitly specified."
-msgstr ""
-"基本上这和前面一个查询相同,只是此时使用 <literal>ResultSetMetaData</literal> "
-"来决定 NAME 和 BIRTHDATE 的类型,而 ID 的类型是明确指出的。 "
+msgid "This is essentially the same query as before, but now <literal>ResultSetMetaData</literal> is used to determine the type of NAME and BIRTHDATE, where as the type of ID is explicitly specified."
+msgstr "基本上这和前面一个查询相同,只是此时使用 <literal>ResultSetMetaData</literal> 来决定 NAME 和 BIRTHDATE 的类型,而 ID 的类型是明确指出的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"How the java.sql.Types returned from ResultSetMetaData is mapped to "
-"Hibernate types is controlled by the Dialect. If a specific type is not "
-"mapped, or does not result in the expected type, it is possible to customize "
-"it via calls to <literal>registerHibernateType</literal> in the Dialect."
-msgstr ""
-"关于从 ResultSetMetaData 返回的 java.sql.Types 是如何映射到 Hibernate 类型,"
-"是由方言(Dialect)控制的。假若某个指定的类型没有被映射,或者不是你所预期的类"
-"型,你可以通过 Dialet 的 <literal>registerHibernateType</literal> 调用自行定"
-"义。 "
+msgid "How the java.sql.Types returned from ResultSetMetaData is mapped to Hibernate types is controlled by the Dialect. If a specific type is not mapped, or does not result in the expected type, it is possible to customize it via calls to <literal>registerHibernateType</literal> in the Dialect."
+msgstr "关于从 ResultSetMetaData 返回的 java.sql.Types 是如何映射到 Hibernate 类型,是由方言(Dialect)控制的。假若某个指定的类型没有被映射,或者不是你所预期的类型,你可以通过 Dialet 的 <literal>registerHibernateType</literal> 调用自行定义。 "
#. Tag: title
#, no-c-format
@@ -157,14 +101,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The above queries were all about returning scalar values, basically "
-"returning the \"raw\" values from the resultset. The following shows how to "
-"get entity objects from a native sql query via <literal>addEntity()</"
-"literal>."
-msgstr ""
-"上面的查询都是返回标量值的,也就是从 resultset 中返回的“裸”数据。下面展示如何"
-"通过 <literal>addEntity()</literal> 让原生查询返回实体对象。"
+msgid "The above queries were all about returning scalar values, basically returning the \"raw\" values from the resultset. The following shows how to get entity objects from a native sql query via <literal>addEntity()</literal>."
+msgstr "上面的查询都是返回标量值的,也就是从 resultset 中返回的“裸”数据。下面展示如何通过 <literal>addEntity()</literal> 让原生查询返回实体对象。"
#. Tag: para
#, no-c-format
@@ -173,29 +111,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Assuming that Cat is mapped as a class with the columns ID, NAME and "
-"BIRTHDATE the above queries will both return a List where each element is a "
-"Cat entity."
-msgstr ""
-"假设 Cat 被映射为拥有 ID,NAME 和 BIRTHDATE 三个字段的类,以上的两个查询都返"
-"回一个 List,每个元素都是一个 Cat 实体。"
+msgid "Assuming that Cat is mapped as a class with the columns ID, NAME and BIRTHDATE the above queries will both return a List where each element is a Cat entity."
+msgstr "假设 Cat 被映射为拥有 ID,NAME 和 BIRTHDATE 三个字段的类,以上的两个查询都返回一个 List,每个元素都是一个 Cat 实体。"
#. Tag: para
#, no-c-format
-msgid ""
-"If the entity is mapped with a <literal>many-to-one</literal> to another "
-"entity it is required to also return this when performing the native query, "
-"otherwise a database specific \"column not found\" error will occur. The "
-"additional columns will automatically be returned when using the * notation, "
-"but we prefer to be explicit as in the following example for a <literal>many-"
-"to-one</literal> to a <literal>Dog</literal>:"
-msgstr ""
-"假若实体在映射时有一个 <literal>many-to-one</literal> 的关联指向另外一个实"
-"体,在查询时必须也返回那个实体,否则会导致发生一个 \"column not found\" 的数"
-"据库错误。这些附加的字段可以使用 * 标注来自动返回,但我们希望还是明确指明,看"
-"下面这个具有指向 <literal>Dog</literal> 的 <literal>many-to-one</literal> 的"
-"例子:"
+msgid "If the entity is mapped with a <literal>many-to-one</literal> to another entity it is required to also return this when performing the native query, otherwise a database specific \"column not found\" error will occur. The additional columns will automatically be returned when using the * notation, but we prefer to be explicit as in the following example for a <literal>many-to-one</literal> to a <literal>Dog</literal>:"
+msgstr "假若实体在映射时有一个 <literal>many-to-one</literal> 的关联指向另外一个实体,在查询时必须也返回那个实体,否则会导致发生一个 \"column not found\" 的数据库错误。这些附加的字段可以使用 * 标注来自动返回,但我们希望还是明确指明,看下面这个具有指向 <literal>Dog</literal> 的 <literal>many-to-one</literal> 的例子:"
#. Tag: para
#, no-c-format
@@ -209,43 +131,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is possible to eagerly join in the <literal>Dog</literal> to avoid the "
-"possible extra roundtrip for initializing the proxy. This is done via the "
-"<literal>addJoin()</literal> method, which allows you to join in an "
-"association or collection."
-msgstr ""
-"通过提前抓取将 <literal>Dog</literal> 连接获得,而避免初始化 proxy 带来的额外"
-"开销也是可能的。这是通过 <literal>addJoin()</literal> 方法进行的,这个方法可"
-"以让你将关联或集合连接进来。"
+msgid "It is possible to eagerly join in the <literal>Dog</literal> to avoid the possible extra roundtrip for initializing the proxy. This is done via the <literal>addJoin()</literal> method, which allows you to join in an association or collection."
+msgstr "通过提前抓取将 <literal>Dog</literal> 连接获得,而避免初始化 proxy 带来的额外开销也是可能的。这是通过 <literal>addJoin()</literal> 方法进行的,这个方法可以让你将关联或集合连接进来。"
#. Tag: para
#, no-c-format
-msgid ""
-"In this example, the returned <literal>Cat</literal>'s will have their "
-"<literal>dog</literal> property fully initialized without any extra "
-"roundtrip to the database. Notice that you added an alias name (\"cat\") to "
-"be able to specify the target property path of the join. It is possible to "
-"do the same eager joining for collections, e.g. if the <literal>Cat</"
-"literal> had a one-to-many to <literal>Dog</literal> instead."
-msgstr ""
-"上面这个例子中,返回的 <literal>Cat</literal> 对象,其 <literal>dog</"
-"literal> 属性被完全初始化了,不再需要数据库的额外操作。注意,我们加了一个别名"
-"(\"cat\"),以便指明 join 的目标属性路径。通过同样的提前连接也可以作用于集合"
-"类,例如,假若 <literal>Cat</literal> 有一个指向 <literal>Dog</literal> 的一"
-"对多关联。 "
+msgid "In this example, the returned <literal>Cat</literal>'s will have their <literal>dog</literal> property fully initialized without any extra roundtrip to the database. Notice that you added an alias name (\"cat\") to be able to specify the target property path of the join. It is possible to do the same eager joining for collections, e.g. if the <literal>Cat</literal> had a one-to-many to <literal>Dog</literal> instead."
+msgstr "上面这个例子中,返回的 <literal>Cat</literal> 对象,其 <literal>dog</literal> 属性被完全初始化了,不再需要数据库的额外操作。注意,我们加了一个别名(\"cat\"),以便指明 join 的目标属性路径。通过同样的提前连接也可以作用于集合类,例如,假若 <literal>Cat</literal> 有一个指向 <literal>Dog</literal> 的一对多关联。 "
#. Tag: para
#, no-c-format
-msgid ""
-"At this stage you are reaching the limits of what is possible with native "
-"queries, without starting to enhance the sql queries to make them usable in "
-"Hibernate. Problems can arise when returning multiple entities of the same "
-"type or when the default alias/column names are not enough."
-msgstr ""
-"到此为止,我们碰到了天花板:若不对 SQL 查询进行增强,这些已经是在 Hibernate "
-"中使用原生 SQL 查询所能做到的最大可能了。下面的问题即将出现:返回多个同样类型"
-"的实体怎么办?或者默认的别名/字段不够又怎么办? "
+msgid "At this stage you are reaching the limits of what is possible with native queries, without starting to enhance the sql queries to make them usable in Hibernate. Problems can arise when returning multiple entities of the same type or when the default alias/column names are not enough."
+msgstr "到此为止,我们碰到了天花板:若不对 SQL 查询进行增强,这些已经是在 Hibernate 中使用原生 SQL 查询所能做到的最大可能了。下面的问题即将出现:返回多个同样类型的实体怎么办?或者默认的别名/字段不够又怎么办? "
#. Tag: title
#, no-c-format
@@ -254,36 +151,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Until now, the result set column names are assumed to be the same as the "
-"column names specified in the mapping document. This can be problematic for "
-"SQL queries that join multiple tables, since the same column names can "
-"appear in more than one table."
-msgstr ""
-"到目前为止,结果集字段名被假定为和映射文件中指定的的字段名是一致的。假若 SQL "
-"查询连接了多个表,同一个字段名可能在多个表中出现多次,这就会造成问题。 "
+msgid "Until now, the result set column names are assumed to be the same as the column names specified in the mapping document. This can be problematic for SQL queries that join multiple tables, since the same column names can appear in more than one table."
+msgstr "到目前为止,结果集字段名被假定为和映射文件中指定的的字段名是一致的。假若 SQL 查询连接了多个表,同一个字段名可能在多个表中出现多次,这就会造成问题。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Column alias injection is needed in the following query (which most likely "
-"will fail):"
+msgid "Column alias injection is needed in the following query (which most likely will fail):"
msgstr "下面的查询中需要使用字段别名注射(这个例子本身会失败):"
#. Tag: para
#, no-c-format
-msgid ""
-"The query was intended to return two Cat instances per row: a cat and its "
-"mother. The query will, however, fail because there is a conflict of names; "
-"the instances are mapped to the same column names. Also, on some databases "
-"the returned column aliases will most likely be on the form \"c.ID\", \"c."
-"NAME\", etc. which are not equal to the columns specified in the mappings "
-"(\"ID\" and \"NAME\")."
-msgstr ""
-"这个查询的本意是希望每行返回两个 Cat 实例,一个是 cat,另一个是它的妈妈。但是"
-"因为它们的字段名被映射为相同的,而且在某些数据库中,返回的字段别名是“c.ID”,"
-"\"c.NAME\" 这样的形式,而它们和在映射文件中的名字(\"ID\" 和 \"NAME\")不匹"
-"配,这就会造成失败。 "
+msgid "The query was intended to return two Cat instances per row: a cat and its mother. The query will, however, fail because there is a conflict of names; the instances are mapped to the same column names. Also, on some databases the returned column aliases will most likely be on the form \"c.ID\", \"c.NAME\", etc. which are not equal to the columns specified in the mappings (\"ID\" and \"NAME\")."
+msgstr "这个查询的本意是希望每行返回两个 Cat 实例,一个是 cat,另一个是它的妈妈。但是因为它们的字段名被映射为相同的,而且在某些数据库中,返回的字段别名是“c.ID”,\"c.NAME\" 这样的形式,而它们和在映射文件中的名字(\"ID\" 和 \"NAME\")不匹配,这就会造成失败。 "
#. Tag: para
#, no-c-format
@@ -292,9 +171,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"the SQL query string, with placeholders for Hibernate to inject column "
-"aliases"
+msgid "the SQL query string, with placeholders for Hibernate to inject column aliases"
msgstr "SQL 查询语句,其中包含占位附来让 Hibernate 注射字段别名"
#. Tag: para
@@ -304,20 +181,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The {cat.*} and {mother.*} notation used above is a shorthand for \"all "
-"properties\". Alternatively, you can list the columns explicitly, but even "
-"in this case Hibernate injects the SQL column aliases for each property. The "
-"placeholder for a column alias is just the property name qualified by the "
-"table alias. In the following example, you retrieve Cats and their mothers "
-"from a different table (cat_log) to the one declared in the mapping "
-"metadata. You can even use the property aliases in the where clause."
-msgstr ""
-"上面使用的 {cat.*} 和 {mother.*} 标记是作为“所有属性”的简写形式出现的。当然你"
-"也可以明确地罗列出字段名,但在这个例子里面我们让 Hibernate 来为每个属性注射 "
-"SQL 字段别名。字段别名的占位符是属性名加上表别名的前缀。在下面的例子中,我们"
-"从另外一个表(cat_log)中通过映射元数据中的指定获取 Cat 和它的妈妈。注意,要"
-"是我们愿意,我们甚至可以在 where 子句中使用属性别名。 "
+msgid "The {cat.*} and {mother.*} notation used above is a shorthand for \"all properties\". Alternatively, you can list the columns explicitly, but even in this case Hibernate injects the SQL column aliases for each property. The placeholder for a column alias is just the property name qualified by the table alias. In the following example, you retrieve Cats and their mothers from a different table (cat_log) to the one declared in the mapping metadata. You can even use the property aliases in the where clause."
+msgstr "上面使用的 {cat.*} 和 {mother.*} 标记是作为“所有属性”的简写形式出现的。当然你也可以明确地罗列出字段名,但在这个例子里面我们让 Hibernate 来为每个属性注射 SQL 字段别名。字段别名的占位符是属性名加上表别名的前缀。在下面的例子中,我们从另外一个表(cat_log)中通过映射元数据中的指定获取 Cat 和它的妈妈。注意,要是我们愿意,我们甚至可以在 where 子句中使用属性别名。 "
#. Tag: title
#, no-c-format
@@ -326,26 +191,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"In most cases the above alias injection is needed. For queries relating to "
-"more complex mappings, like composite properties, inheritance "
-"discriminators, collections etc., you can use specific aliases that allow "
-"Hibernate to inject the proper aliases."
-msgstr ""
-"大多数情况下,都需要上面的属性注射,但在使用更加复杂的映射,比如复合属性、通"
-"过标识符构造继承树,以及集合类等等情况下,也有一些特别的别名,来允许 "
-"Hibernate 注入合适的别名。 "
+msgid "In most cases the above alias injection is needed. For queries relating to more complex mappings, like composite properties, inheritance discriminators, collections etc., you can use specific aliases that allow Hibernate to inject the proper aliases."
+msgstr "大多数情况下,都需要上面的属性注射,但在使用更加复杂的映射,比如复合属性、通过标识符构造继承树,以及集合类等等情况下,也有一些特别的别名,来允许 Hibernate 注入合适的别名。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The following table shows the different ways you can use the alias "
-"injection. Please note that the alias names in the result are simply "
-"examples; each alias will have a unique and probably different name when "
-"used."
-msgstr ""
-"下表列出了使用别名注射参数的不同可能性。注意:下面结果中的别名只是示例,实用"
-"时每个别名需要唯一并且不同的名字。 "
+msgid "The following table shows the different ways you can use the alias injection. Please note that the alias names in the result are simply examples; each alias will have a unique and probably different name when used."
+msgstr "下表列出了使用别名注射参数的不同可能性。注意:下面结果中的别名只是示例,实用时每个别名需要唯一并且不同的名字。 "
#. Tag: title
#, no-c-format
@@ -373,14 +225,14 @@
msgstr "简单属性"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].[propertyname]</literal>"
-msgstr "<literal>{[aliasname].[propertyname]</literal>"
+msgstr "<literal>{[aliasname].[propertyname]</literal> "
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>A_NAME as {item.name}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>A_NAME as {item.name}</literal>"
#. Tag: entry
#, no-c-format
@@ -388,18 +240,14 @@
msgstr "复合属性"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].[componentname].[propertyname]}</literal>"
-msgstr "<literal>{[aliasname].[componentname].[propertyname]}</literal>"
+msgstr "<literal>{[aliasname].[componentname].[propertyname]}</literal> "
#. Tag: entry
-#, fuzzy, no-c-format
-msgid ""
-"<literal>CURRENCY as {item.amount.currency}, VALUE as {item.amount.value}</"
-"literal>"
-msgstr ""
-"<literal>CURRENCY as {item.amount.currency}, VALUE as {item.amount.value}</"
-"literal>"
+#, no-c-format
+msgid "<literal>CURRENCY as {item.amount.currency}, VALUE as {item.amount.value}</literal>"
+msgstr "<literal>CURRENCY as {item.amount.currency}, VALUE as {item.amount.value}</literal> "
#. Tag: entry
#, no-c-format
@@ -407,14 +255,14 @@
msgstr "实体辨别器(Discriminator of an entity)"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].class}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>{[aliasname].class}</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>DISC as {item.class}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>DISC as {item.class}</literal>"
#. Tag: entry
#, no-c-format
@@ -422,9 +270,9 @@
msgstr "实体的所有属性"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].*}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>{[aliasname].*}</literal>"
#. Tag: entry
#, no-c-format
@@ -437,14 +285,14 @@
msgstr "集合键(collection key)"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].key}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>{[aliasname].key}</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>ORGID as {coll.key}</literal>"
-msgstr "<literal>{coll.*}</literal>"
+msgstr "<literal>ORGID as {coll.key}</literal>"
#. Tag: entry
#, no-c-format
@@ -452,14 +300,14 @@
msgstr "集合 id"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].id}</literal>"
-msgstr "<literal>{item.*}</literal>"
+msgstr "<literal>{[aliasname].id}</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>EMPID as {coll.id}</literal>"
-msgstr "<literal>{coll.*}</literal>"
+msgstr "<literal>EMPID as {coll.id}</literal>"
#. Tag: entry
#, no-c-format
@@ -467,14 +315,14 @@
msgstr "集合元素"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].element}</literal>"
-msgstr "<literal>{coll.element.*}</literal>"
+msgstr "<literal>{[aliasname].element}</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>XID as {coll.element}</literal>"
-msgstr "<literal>{coll.element.*}</literal>"
+msgstr "<literal>XID as {coll.element}</literal>"
#. Tag: entry
#, no-c-format
@@ -482,14 +330,14 @@
msgstr "集合元素的属性 "
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].element.[propertyname]}</literal>"
-msgstr "<literal>{[aliasname].element.[propertyname]}</literal>"
+msgstr "<literal>{[aliasname].element.[propertyname]}</literal> "
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>NAME as {coll.element.name}</literal>"
-msgstr "<literal>{coll.element.*}</literal>"
+msgstr "<literal>NAME as {coll.element.name}</literal>"
#. Tag: entry
#, no-c-format
@@ -497,9 +345,9 @@
msgstr "集合元素的所有属性"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>{[aliasname].element.*}</literal>"
-msgstr "<literal>{coll.element.*}</literal>"
+msgstr "<literal>{[aliasname].element.*}</literal>"
#. Tag: entry
#, no-c-format
@@ -507,9 +355,9 @@
msgstr "<literal>{coll.element.*}</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "All properties of the collection"
-msgstr "集合的所有属性"
+msgstr "集合的所有属性 "
#. Tag: entry
#, no-c-format
@@ -523,12 +371,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is possible to apply a ResultTransformer to native SQL queries, allowing "
-"it to return non-managed entities."
-msgstr ""
-"可以对原生 sql 查询使用 ResultTransformer。这会返回不受 Hibernate 管理的实"
-"体。 "
+msgid "It is possible to apply a ResultTransformer to native SQL queries, allowing it to return non-managed entities."
+msgstr "可以对原生 sql 查询使用 ResultTransformer。这会返回不受 Hibernate 管理的实体。 "
#. Tag: para
#, no-c-format
@@ -537,13 +381,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The above query will return a list of <literal>CatDTO</literal> which has "
-"been instantiated and injected the values of NAME and BIRTHNAME into its "
-"corresponding properties or fields."
-msgstr ""
-"上面的查询将会返回 <literal>CatDTO</literal> 的列表,它将被实例化并且将 NAME "
-"和 BIRTHDAY 的值注射入对应的属性或者字段。"
+msgid "The above query will return a list of <literal>CatDTO</literal> which has been instantiated and injected the values of NAME and BIRTHNAME into its corresponding properties or fields."
+msgstr "上面的查询将会返回 <literal>CatDTO</literal> 的列表,它将被实例化并且将 NAME 和 BIRTHDAY 的值注射入对应的属性或者字段。"
#. Tag: title
#, no-c-format
@@ -552,13 +391,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Native SQL queries which query for entities that are mapped as part of an "
-"inheritance must include all properties for the baseclass and all its "
-"subclasses."
-msgstr ""
-"原生 SQL 查询假若其查询结果实体是继承树中的一部分,它必须包含基类和所有子类的"
-"所有属性。 "
+msgid "Native SQL queries which query for entities that are mapped as part of an inheritance must include all properties for the baseclass and all its subclasses."
+msgstr "原生 SQL 查询假若其查询结果实体是继承树中的一部分,它必须包含基类和所有子类的所有属性。 "
#. Tag: title
#, no-c-format
@@ -577,52 +411,27 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Named SQL queries can be defined in the mapping document and called in "
-"exactly the same way as a named HQL query. In this case, you do "
-"<emphasis>not</emphasis> need to call <literal>addEntity()</literal>."
-msgstr ""
-"可以在映射文档中定义查询的名字,然后就可以象调用一个命名的 HQL 查询一样直接调"
-"用命名 SQL 查询.在这种情况下,我们<emphasis>不</emphasis> 需要调用 "
-"<literal>addEntity()</literal> 方法。 "
+msgid "Named SQL queries can be defined in the mapping document and called in exactly the same way as a named HQL query. In this case, you do <emphasis>not</emphasis> need to call <literal>addEntity()</literal>."
+msgstr "可以在映射文档中定义查询的名字,然后就可以象调用一个命名的 HQL 查询一样直接调用命名 SQL 查询.在这种情况下,我们<emphasis>不</emphasis> 需要调用 <literal>addEntity()</literal> 方法。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal><return-join></literal> element is use to join "
-"associations and the <literal><load-collection></literal> element is "
-"used to define queries which initialize collections,"
-msgstr ""
-"<literal><return-join></literal> 和 <literal><load-collection></"
-"literal> 元素是用来连接关联以及将查询定义为预先初始化各个集合的。 "
+msgid "The <literal><return-join></literal> element is use to join associations and the <literal><load-collection></literal> element is used to define queries which initialize collections,"
+msgstr "<literal><return-join></literal> 和 <literal><load-collection></literal> 元素是用来连接关联以及将查询定义为预先初始化各个集合的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A named SQL query may return a scalar value. You must declare the column "
-"alias and Hibernate type using the <literal><return-scalar></literal> "
-"element:"
-msgstr ""
-"一个命名查询可能会返回一个标量值。你必须使用 <literal><return-scalar></"
-"literal> 元素来指定字段的别名和 Hibernate 类型:"
+msgid "A named SQL query may return a scalar value. You must declare the column alias and Hibernate type using the <literal><return-scalar></literal> element:"
+msgstr "一个命名查询可能会返回一个标量值。你必须使用 <literal><return-scalar></literal> 元素来指定字段的别名和 Hibernate 类型:"
#. Tag: para
#, no-c-format
-msgid ""
-"You can externalize the resultset mapping information in a <literal><"
-"resultset></literal> element which will allow you to either reuse them "
-"across several named queries or through the <literal>setResultSetMapping()</"
-"literal> API."
-msgstr ""
-"你可以把结果集映射的信息放在外部的 <literal><resultset></literal> 元素"
-"中,这样就可以在多个命名查询间,或者通过 <literal>setResultSetMapping()</"
-"literal> API 来访问。 "
+msgid "You can externalize the resultset mapping information in a <literal><resultset></literal> element which will allow you to either reuse them across several named queries or through the <literal>setResultSetMapping()</literal> API."
+msgstr "你可以把结果集映射的信息放在外部的 <literal><resultset></literal> 元素中,这样就可以在多个命名查询间,或者通过 <literal>setResultSetMapping()</literal> API 来访问。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can, alternatively, use the resultset mapping information in your hbm "
-"files directly in java code."
+msgid "You can, alternatively, use the resultset mapping information in your hbm files directly in java code."
msgstr "另外,你可以在 java 代码中直接使用 hbm 文件中的结果集定义信息。 "
#. Tag: title
@@ -632,43 +441,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can explicitly tell Hibernate what column aliases to use with "
-"<literal><return-property></literal>, instead of using the <literal>{}"
-"</literal>-syntax to let Hibernate inject its own aliases.For example:"
-msgstr ""
-"使用 <literal><return-property></literal> 你可以明确的告诉 Hibernate 使"
-"用哪些字段别名,这取代了使用 <literal>{}</literal>-语法 来让 Hibernate 注入它"
-"自己的别名。例如:"
+msgid "You can explicitly tell Hibernate what column aliases to use with <literal><return-property></literal>, instead of using the <literal>{}</literal>-syntax to let Hibernate inject its own aliases.For example:"
+msgstr "使用 <literal><return-property></literal> 你可以明确的告诉 Hibernate 使用哪些字段别名,这取代了使用 <literal>{}</literal>-语法 来让 Hibernate 注入它自己的别名。例如:"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal><return-property></literal> also works with multiple columns. "
-"This solves a limitation with the <literal>{}</literal>-syntax which cannot "
-"allow fine grained control of multi-column properties."
-msgstr ""
-"<literal><return-property></literal> 也可用于多个字段,它解决了使用 "
-"<literal>{}</literal>-语法不能细粒度控制多个字段的限制。 "
+msgid "<literal><return-property></literal> also works with multiple columns. This solves a limitation with the <literal>{}</literal>-syntax which cannot allow fine grained control of multi-column properties."
+msgstr "<literal><return-property></literal> 也可用于多个字段,它解决了使用 <literal>{}</literal>-语法不能细粒度控制多个字段的限制。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In this example <literal><return-property></literal> was used in "
-"combination with the <literal>{}</literal>-syntax for injection. This allows "
-"users to choose how they want to refer column and properties."
-msgstr ""
-"注意在这个例子中,我们使用了 <literal><return-property></literal> 结合 "
-"<literal>{}</literal> 的注入语法。允许用户来选择如何引用字段以及属性。 "
+msgid "In this example <literal><return-property></literal> was used in combination with the <literal>{}</literal>-syntax for injection. This allows users to choose how they want to refer column and properties."
+msgstr "注意在这个例子中,我们使用了 <literal><return-property></literal> 结合 <literal>{}</literal> 的注入语法。允许用户来选择如何引用字段以及属性。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If your mapping has a discriminator you must use <literal><return-"
-"discriminator></literal> to specify the discriminator column."
-msgstr ""
-"如果你映射一个识别器(discriminator),你必须使用 <literal><return-"
-"discriminator></literal> 来指定识别器字段。"
+msgid "If your mapping has a discriminator you must use <literal><return-discriminator></literal> to specify the discriminator column."
+msgstr "如果你映射一个识别器(discriminator),你必须使用 <literal><return-discriminator></literal> 来指定识别器字段。"
#. Tag: title
#, no-c-format
@@ -677,17 +466,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate3 provides support for queries via stored procedures and functions. "
-"Most of the following documentation is equivalent for both. The stored "
-"procedure/function must return a resultset as the first out-parameter to be "
-"able to work with Hibernate. An example of such a stored function in Oracle "
-"9 and higher is as follows:"
-msgstr ""
-"Hibernate 3 引入了对存储过程查询(stored procedure)和函数(function)的支"
-"持。以下的说明中,这二者一般都适用。存储过程/函数必须返回一个结果集,作为 "
-"Hibernate 能够使用的第一个外部参数。下面是一个 Oracle9 和更高版本的存储过程例"
-"子。 "
+msgid "Hibernate3 provides support for queries via stored procedures and functions. Most of the following documentation is equivalent for both. The stored procedure/function must return a resultset as the first out-parameter to be able to work with Hibernate. An example of such a stored function in Oracle 9 and higher is as follows:"
+msgstr "Hibernate 3 引入了对存储过程查询(stored procedure)和函数(function)的支持。以下的说明中,这二者一般都适用。存储过程/函数必须返回一个结果集,作为 Hibernate 能够使用的第一个外部参数。下面是一个 Oracle9 和更高版本的存储过程例子。 "
#. Tag: para
#, no-c-format
@@ -696,13 +476,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Stored procedures currently only return scalars and entities. <literal><"
-"return-join></literal> and <literal><load-collection></literal> are "
-"not supported."
-msgstr ""
-"注意存储过程当前仅仅返回标量和实体现在。不支持 <literal><return-join></"
-"literal> 和 <literal><load-collection></literal>。 "
+msgid "Stored procedures currently only return scalars and entities. <literal><return-join></literal> and <literal><load-collection></literal> are not supported."
+msgstr "注意存储过程当前仅仅返回标量和实体现在。不支持 <literal><return-join></literal> 和 <literal><load-collection></literal>。 "
#. Tag: title
#, no-c-format
@@ -711,38 +486,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You cannot use stored procedures with Hibernate unless you follow some "
-"procedure/function rules. If they do not follow those rules they are not "
-"usable with Hibernate. If you still want to use these procedures you have to "
-"execute them via <literal>session.connection()</literal>. The rules are "
-"different for each database, since database vendors have different stored "
-"procedure semantics/syntax."
-msgstr ""
-"为了在 Hibernate 中使用存储过程,你必须遵循一些规则。不遵循这些规则的存储过程"
-"将不可用。如果你仍然想使用他们,你必须通过 <literal>session.connection()</"
-"literal> 来执行他们。这些规则针对于不同的数据库。因为数据库提供商有各种不同的"
-"存储过程语法和语义。 "
+msgid "You cannot use stored procedures with Hibernate unless you follow some procedure/function rules. If they do not follow those rules they are not usable with Hibernate. If you still want to use these procedures you have to execute them via <literal>session.connection()</literal>. The rules are different for each database, since database vendors have different stored procedure semantics/syntax."
+msgstr "为了在 Hibernate 中使用存储过程,你必须遵循一些规则。不遵循这些规则的存储过程将不可用。如果你仍然想使用他们,你必须通过 <literal>session.connection()</literal> 来执行他们。这些规则针对于不同的数据库。因为数据库提供商有各种不同的存储过程语法和语义。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Stored procedure queries cannot be paged with <literal>setFirstResult()/"
-"setMaxResults()</literal>."
-msgstr ""
-"对存储过程进行的查询无法使用 <literal>setFirstResult()/setMaxResults()</"
-"literal> 进行分页。 "
+msgid "Stored procedure queries cannot be paged with <literal>setFirstResult()/setMaxResults()</literal>."
+msgstr "对存储过程进行的查询无法使用 <literal>setFirstResult()/setMaxResults()</literal> 进行分页。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The recommended call form is standard SQL92: <literal>{ ? = call functionName"
-"(<parameters>) }</literal> or <literal>{ ? = call procedureName(<"
-"parameters>}</literal>. Native call syntax is not supported."
-msgstr ""
-"建议采用的调用方式是标准 SQL92: <literal>{ ? = call functionName(<"
-"parameters>) }</literal> 或者 <literal>{ ? = call procedureName(<"
-"parameters>) }</literal>。原生调用语法不被支持。 "
+msgid "The recommended call form is standard SQL92: <literal>{ ? = call functionName(<parameters>) }</literal> or <literal>{ ? = call procedureName(<parameters>}</literal>. Native call syntax is not supported."
+msgstr "建议采用的调用方式是标准 SQL92: <literal>{ ? = call functionName(<parameters>) }</literal> 或者 <literal>{ ? = call procedureName(<parameters>) }</literal>。原生调用语法不被支持。 "
#. Tag: para
#, no-c-format
@@ -751,17 +506,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A function must return a result set. The first parameter of a procedure must "
-"be an <literal>OUT</literal> that returns a result set. This is done by "
-"using a <literal>SYS_REFCURSOR</literal> type in Oracle 9 or 10. In Oracle "
-"you need to define a <literal>REF CURSOR</literal> type. See Oracle "
-"literature for further information."
-msgstr ""
-"函数必须返回一个结果集。存储过程的第一个参数必须是 <literal>OUT</literal>,它"
-"返回一个结果集。这是通过 Oracle 9 或 10 的 <literal>SYS_REFCURSOR</literal> "
-"类型来完成的。在 Oracle 中你需要定义一个 <literal>REF CURSOR</literal> 类型,"
-"参见 Oracle 的手册。 "
+msgid "A function must return a result set. The first parameter of a procedure must be an <literal>OUT</literal> that returns a result set. This is done by using a <literal>SYS_REFCURSOR</literal> type in Oracle 9 or 10. In Oracle you need to define a <literal>REF CURSOR</literal> type. See Oracle literature for further information."
+msgstr "函数必须返回一个结果集。存储过程的第一个参数必须是 <literal>OUT</literal>,它返回一个结果集。这是通过 Oracle 9 或 10 的 <literal>SYS_REFCURSOR</literal> 类型来完成的。在 Oracle 中你需要定义一个 <literal>REF CURSOR</literal> 类型,参见 Oracle 的手册。 "
#. Tag: para
#, no-c-format
@@ -770,23 +516,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The procedure must return a result set. Note that since these servers can "
-"return multiple result sets and update counts, Hibernate will iterate the "
-"results and take the first result that is a result set as its return value. "
-"Everything else will be discarded."
-msgstr ""
-"存储过程必须返回一个结果集。注意这些 servers 可能返回多个结果集以及更新的数"
-"目。Hibernate 将取出第一条结果集作为它的返回值,其他将被丢弃。 "
+msgid "The procedure must return a result set. Note that since these servers can return multiple result sets and update counts, Hibernate will iterate the results and take the first result that is a result set as its return value. Everything else will be discarded."
+msgstr "存储过程必须返回一个结果集。注意这些 servers 可能返回多个结果集以及更新的数目。Hibernate 将取出第一条结果集作为它的返回值,其他将被丢弃。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you can enable <literal>SET NOCOUNT ON</literal> in your procedure it "
-"will probably be more efficient, but this is not a requirement."
-msgstr ""
-"如果你能够在存储过程里设定 <literal>SET NOCOUNT ON</literal>,这可能会效率更"
-"高,但这不是必需的。"
+msgid "If you can enable <literal>SET NOCOUNT ON</literal> in your procedure it will probably be more efficient, but this is not a requirement."
+msgstr "如果你能够在存储过程里设定 <literal>SET NOCOUNT ON</literal>,这可能会效率更高,但这不是必需的。"
#. Tag: title
#, no-c-format
@@ -795,78 +531,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate3 can use custom SQL for create, update, and delete operations. The "
-"SQL can be overridden at the statement level or inidividual column level. "
-"This section describes statement overrides. For columns, see <xref linkend="
-"\"mapping-column-read-and-write\" />."
-msgstr ""
+msgid "Hibernate3 can use custom SQL for create, update, and delete operations. The SQL can be overridden at the statement level or inidividual column level. This section describes statement overrides. For columns, see <xref linkend=\"mapping-column-read-and-write\" />."
+msgstr "Hibernate3 可以自定义 create、update 和 delete 操作。SQL 可以在语句或单个字段级别进行覆盖。本节描述了语句覆盖。对于字段覆盖,请参考 <xref linkend=\"mapping-column-read-and-write\" />。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The class and collection persisters in Hibernate already contain a set of "
-"configuration time generated strings (insertsql, deletesql, updatesql etc.). "
-"The mapping tags <literal><sql-insert></literal>, <literal><sql-"
-"delete></literal>, and <literal><sql-update></literal> override "
-"these strings:"
-msgstr ""
-"Hibernate3 能够使用定制的 SQL 语句来执行 create,update 和 delete 操作。在 "
-"Hibernate 中,持久化的类和集合已经包含了一套配置期产生的语句(insertsql,"
-"deletesql,updatesql 等等),这些映射标记 <literal><sql-insert></"
-"literal>,<literal><sql-delete></literal> 和 <literal><sql-"
-"update></literal> 重载了这些语句。"
+#, no-c-format
+msgid "The class and collection persisters in Hibernate already contain a set of configuration time generated strings (insertsql, deletesql, updatesql etc.). The mapping tags <literal><sql-insert></literal>, <literal><sql-delete></literal>, and <literal><sql-update></literal> override these strings:"
+msgstr "Hibernate3 能够使用定制的 SQL 语句来执行 create,update 和 delete 操作。在 Hibernate 中,持久化的类和集合已经包含了一套配置期产生的语句(insertsql,deletesql,updatesql 等等),这些映射标记 <literal><sql-insert></literal>,<literal><sql-delete></literal> 和 <literal><sql-update></literal> 重载了这些语句。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The SQL is directly executed in your database, so you can use any dialect "
-"you like. This will reduce the portability of your mapping if you use "
-"database specific SQL."
-msgstr ""
-"这些 SQL 直接在你的数据库里执行,所以你可以自由的使用你喜欢的任意语法。但如果"
-"你使用数据库特定的语法,这当然会降低你映射的可移植性。 "
+msgid "The SQL is directly executed in your database, so you can use any dialect you like. This will reduce the portability of your mapping if you use database specific SQL."
+msgstr "这些 SQL 直接在你的数据库里执行,所以你可以自由的使用你喜欢的任意语法。但如果你使用数据库特定的语法,这当然会降低你映射的可移植性。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Stored procedures are supported if the <literal>callable</literal> attribute "
-"is set:"
+msgid "Stored procedures are supported if the <literal>callable</literal> attribute is set:"
msgstr "如果设定 <literal>callable</literal>,则能够支持存储过程了。"
#. Tag: para
#, no-c-format
-msgid ""
-"The order of the positional parameters is vital, as they must be in the same "
-"sequence as Hibernate expects them."
+msgid "The order of the positional parameters is vital, as they must be in the same sequence as Hibernate expects them."
msgstr "参数的位置顺序是非常重要的,他们必须和 Hibernate 所期待的顺序相同。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can view the expected order by enabling debug logging for the "
-"<literal>org.hibernate.persister.entity</literal> level. With this level "
-"enabled, Hibernate will print out the static SQL that is used to create, "
-"update, delete etc. entities. To view the expected sequence, do not include "
-"your custom SQL in the mapping files, as this will override the Hibernate "
-"generated static SQL."
-msgstr ""
-"你能够通过设定日志调试级别为 <literal>org.hiberante.persister.entity</"
-"literal> 来查看 Hibernate 所期待的顺序。在这个级别下,Hibernate 将会打印出"
-"create,update 和 delete 实体的静态 SQL。(如果想看到预计的顺序。记得不要将定"
-"制 SQL 包含在映射文件里,因为他们会重载 Hibernate 生成的静态 SQL。) "
+msgid "You can view the expected order by enabling debug logging for the <literal>org.hibernate.persister.entity</literal> level. With this level enabled, Hibernate will print out the static SQL that is used to create, update, delete etc. entities. To view the expected sequence, do not include your custom SQL in the mapping files, as this will override the Hibernate generated static SQL."
+msgstr "你能够通过设定日志调试级别为 <literal>org.hiberante.persister.entity</literal> 来查看 Hibernate 所期待的顺序。在这个级别下,Hibernate 将会打印出create,update 和 delete 实体的静态 SQL。(如果想看到预计的顺序。记得不要将定制 SQL 包含在映射文件里,因为他们会重载 Hibernate 生成的静态 SQL。) "
#. Tag: para
#, no-c-format
-msgid ""
-"The stored procedures are in most cases required to return the number of "
-"rows inserted, updated and deleted, as Hibernate has some runtime checks for "
-"the success of the statement. Hibernate always registers the first statement "
-"parameter as a numeric output parameter for the CUD operations:"
-msgstr ""
-"在大多数情况下(最好这么做),存储过程需要返回插入/更新/删除的行数,因为 "
-"Hibernate 对语句的成功执行有些运行时的检查。Hibernate 常会把进行 CUD 操作的语"
-"句的第一个参数注册为一个数值型输出参数。 "
+msgid "The stored procedures are in most cases required to return the number of rows inserted, updated and deleted, as Hibernate has some runtime checks for the success of the statement. Hibernate always registers the first statement parameter as a numeric output parameter for the CUD operations:"
+msgstr "在大多数情况下(最好这么做),存储过程需要返回插入/更新/删除的行数,因为 Hibernate 对语句的成功执行有些运行时的检查。Hibernate 常会把进行 CUD 操作的语句的第一个参数注册为一个数值型输出参数。 "
#. Tag: title
#, no-c-format
@@ -875,21 +571,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also declare your own SQL (or HQL) queries for entity loading. As "
-"with inserts, updates, and deletes, this can be done at the individual "
-"column level as described in <xref linkend=\"mapping-column-read-and-write"
-"\" /> or at the statement level. Here is an example of a statement level "
-"override:"
-msgstr ""
+msgid "You can also declare your own SQL (or HQL) queries for entity loading. As with inserts, updates, and deletes, this can be done at the individual column level as described in <xref linkend=\"mapping-column-read-and-write\" /> or at the statement level. Here is an example of a statement level override:"
+msgstr "你也可以为实体加载声明自己的 SQL(或 HQL)。如 <xref linkend=\"mapping-column-read-and-write\" /> 所描述的,inserts、updates 和 deletes 可以在字段级别或语句级别来完成。下面是一个语句覆盖的例子:"
#. Tag: para
#, no-c-format
-msgid ""
-"This is just a named query declaration, as discussed earlier. You can "
-"reference this named query in a class mapping:"
-msgstr ""
-"这只是一个前面讨论过的命名查询声明,你可以在类映射里引用这个命名查询。 "
+msgid "This is just a named query declaration, as discussed earlier. You can reference this named query in a class mapping:"
+msgstr "这只是一个前面讨论过的命名查询声明,你可以在类映射里引用这个命名查询。 "
#. Tag: para
#, no-c-format
@@ -903,9 +591,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also define an entity loader that loads a collection by join "
-"fetching:"
+msgid "You can also define an entity loader that loads a collection by join fetching:"
msgstr "你甚至还可以定义一个实体装载器,它通过连接抓取装载一个集合: "
#~ msgid ""
@@ -1058,7 +744,6 @@
#, fuzzy
#~ msgid "{[aliasname].*}"
#~ msgstr "<literal>{[aliasname].*}</literal>"
-
#~ msgid "{item.*}"
#~ msgstr "{item.*}"
@@ -1097,7 +782,6 @@
#, fuzzy
#~ msgid "{coll.element.*}"
#~ msgstr "{coll.*}"
-
#~ msgid "{coll.*}"
#~ msgstr "{coll.*}"
@@ -1568,3 +1252,4 @@
#~ " ON pers.ID = emp.PERSON_ID\n"
#~ " WHERE ID=?\n"
#~ "</sql-query>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/session_api.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/session_api.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/session_api.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -7,7 +7,7 @@
"Project-Id-Version: session_api\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-03-12T00:03:48\n"
-"PO-Revision-Date: 2009-12-04 14:00+1000\n"
+"PO-Revision-Date: 2010-03-16 09:58+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -22,31 +22,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate is a full object/relational mapping solution that not only shields "
-"the developer from the details of the underlying database management system, "
-"but also offers <emphasis>state management</emphasis> of objects. This is, "
-"contrary to the management of SQL <literal>statements</literal> in common "
-"JDBC/SQL persistence layers, a natural object-oriented view of persistence "
-"in Java applications."
-msgstr ""
-"Hibernate 是完整的对象/关系映射解决方案,它提供了对象<emphasis>状态管理"
-"(state management)</emphasis>的功能,使开发者不再需要理会底层数据库系统的细"
-"节。也就是说,相对于常见的 JDBC/SQL 持久层方案中需要<literal>管理 SQL 语句</"
-"literal>,Hibernate 采用了更自然的面向对象的视角来持久化 Java 应用中的数据。 "
+msgid "Hibernate is a full object/relational mapping solution that not only shields the developer from the details of the underlying database management system, but also offers <emphasis>state management</emphasis> of objects. This is, contrary to the management of SQL <literal>statements</literal> in common JDBC/SQL persistence layers, a natural object-oriented view of persistence in Java applications."
+msgstr "Hibernate 是完整的对象/关系映射解决方案,它提供了对象<emphasis>状态管理(state management)</emphasis>的功能,使开发者不再需要理会底层数据库系统的细节。也就是说,相对于常见的 JDBC/SQL 持久层方案中需要<literal>管理 SQL 语句</literal>,Hibernate 采用了更自然的面向对象的视角来持久化 Java 应用中的数据。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In other words, Hibernate application developers should always think about "
-"the <emphasis>state</emphasis> of their objects, and not necessarily about "
-"the execution of SQL statements. This part is taken care of by Hibernate and "
-"is only relevant for the application developer when tuning the performance "
-"of the system."
-msgstr ""
-"换句话说,使用 Hibernate 的开发者应该总是关注对象的<emphasis>状态(state)</"
-"emphasis>,不必考虑 SQL 语句的执行。这部分细节已经由 Hibernate 掌管妥当,只有"
-"开发者在进行系统性能调优的时候才需要进行了解。"
+msgid "In other words, Hibernate application developers should always think about the <emphasis>state</emphasis> of their objects, and not necessarily about the execution of SQL statements. This part is taken care of by Hibernate and is only relevant for the application developer when tuning the performance of the system."
+msgstr "换句话说,使用 Hibernate 的开发者应该总是关注对象的<emphasis>状态(state)</emphasis>,不必考虑 SQL 语句的执行。这部分细节已经由 Hibernate 掌管妥当,只有开发者在进行系统性能调优的时候才需要进行了解。"
#. Tag: title
#, no-c-format
@@ -60,76 +42,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Transient</emphasis> - an object is transient if it has just been "
-"instantiated using the <literal>new</literal> operator, and it is not "
-"associated with a Hibernate <literal>Session</literal>. It has no persistent "
-"representation in the database and no identifier value has been assigned. "
-"Transient instances will be destroyed by the garbage collector if the "
-"application does not hold a reference anymore. Use the Hibernate "
-"<literal>Session</literal> to make an object persistent (and let Hibernate "
-"take care of the SQL statements that need to be executed for this "
-"transition)."
-msgstr ""
-"<emphasis>瞬时(Transient)</emphasis> — 由 <literal>new</literal> 操作符创"
-"建,且尚未与Hibernate <literal>Session</literal> 关联的对象被认定为瞬时"
-"(Transient)的。瞬时(Transient)对象不会被持久化到数据库中,也不会被赋予持"
-"久化标识(identifier)。 如果瞬时(Transient)对象在程序中没有被引用,它会被"
-"垃圾回收器(garbage collector)销毁。 使用 Hibernate <literal>Session</"
-"literal>可以将其变为持久(Persistent)状态。(Hibernate会自动执行必要的SQL语"
-"句) "
+msgid "<emphasis>Transient</emphasis> - an object is transient if it has just been instantiated using the <literal>new</literal> operator, and it is not associated with a Hibernate <literal>Session</literal>. It has no persistent representation in the database and no identifier value has been assigned. Transient instances will be destroyed by the garbage collector if the application does not hold a reference anymore. Use the Hibernate <literal>Session</literal> to make an object persistent (and let Hibernate take care of the SQL statements that need to be executed for this transition)."
+msgstr "<emphasis>瞬时(Transient)</emphasis> — 由 <literal>new</literal> 操作符创建,且尚未与Hibernate <literal>Session</literal> 关联的对象被认定为瞬时(Transient)的。瞬时(Transient)对象不会被持久化到数据库中,也不会被赋予持久化标识(identifier)。 如果瞬时(Transient)对象在程序中没有被引用,它会被垃圾回收器(garbage collector)销毁。 使用 Hibernate <literal>Session</literal>可以将其变为持久(Persistent)状态。(Hibernate会自动执行必要的SQL语句) "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Persistent</emphasis> - a persistent instance has a representation "
-"in the database and an identifier value. It might just have been saved or "
-"loaded, however, it is by definition in the scope of a <literal>Session</"
-"literal>. Hibernate will detect any changes made to an object in persistent "
-"state and synchronize the state with the database when the unit of work "
-"completes. Developers do not execute manual <literal>UPDATE</literal> "
-"statements, or <literal>DELETE</literal> statements when an object should be "
-"made transient."
-msgstr ""
-"<emphasis>持久(Persistent)</emphasis> — 持久(Persistent)的实例在数据库中"
-"有对应的记录,并拥有一个持久化标识(identifier)。 持久(Persistent)的实例可"
-"能是刚被保存的,或刚被加载的,无论哪一种,按定义,它存在于相关联的"
-"<literal>Session</literal>作用范围内。 Hibernate会检测到处于持久"
-"(Persistent)状态的对象的任何改动,在当前操作单元(unit of work)执行完毕时"
-"将对象数据(state)与数据库同步(synchronize)。 开发者不需要手动执行"
-"<literal>UPDATE</literal>。将对象从持久(Persistent)状态变成瞬时"
-"(Transient)状态同样也不需要手动执行 <literal>DELETE</literal> 语句。 "
+msgid "<emphasis>Persistent</emphasis> - a persistent instance has a representation in the database and an identifier value. It might just have been saved or loaded, however, it is by definition in the scope of a <literal>Session</literal>. Hibernate will detect any changes made to an object in persistent state and synchronize the state with the database when the unit of work completes. Developers do not execute manual <literal>UPDATE</literal> statements, or <literal>DELETE</literal> statements when an object should be made transient."
+msgstr "<emphasis>持久(Persistent)</emphasis> — 持久(Persistent)的实例在数据库中有对应的记录,并拥有一个持久化标识(identifier)。 持久(Persistent)的实例可能是刚被保存的,或刚被加载的,无论哪一种,按定义,它存在于相关联的<literal>Session</literal>作用范围内。 Hibernate会检测到处于持久(Persistent)状态的对象的任何改动,在当前操作单元(unit of work)执行完毕时将对象数据(state)与数据库同步(synchronize)。 开发者不需要手动执行<literal>UPDATE</literal>。将对象从持久(Persistent)状态变成瞬时(Transient)状态同样也不需要手动执行 <literal>DELETE</literal> 语句。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Detached</emphasis> - a detached instance is an object that has "
-"been persistent, but its <literal>Session</literal> has been closed. The "
-"reference to the object is still valid, of course, and the detached instance "
-"might even be modified in this state. A detached instance can be reattached "
-"to a new <literal>Session</literal> at a later point in time, making it (and "
-"all the modifications) persistent again. This feature enables a programming "
-"model for long running units of work that require user think-time. We call "
-"them <emphasis>application transactions</emphasis>, i.e., a unit of work "
-"from the point of view of the user."
-msgstr ""
-"<emphasis>脱管(Detached)</emphasis> — 与持久(Persistent)对象关联的"
-"<literal>Session</literal>被关闭后,对象就变为脱管(Detached)的。对脱管"
-"(Detached)对象的引用依然有效,对象可继续被修改。脱管(Detached)对象如果重"
-"新关联到某个新的 <literal>Session</literal> 上, 会再次转变为持久"
-"(Persistent)的(在Detached其间的改动将被持久化到数据库)。 这个功能使得一种"
-"编程模型,即中间会给用户思考时间(user think-time)的长时间运行的操作单元"
-"(unit of work)的编程模型成为可能。我们称之为<emphasis>应用程序事务</"
-"emphasis>,即从用户观点看是一个操作单元(unit of work)。 "
+msgid "<emphasis>Detached</emphasis> - a detached instance is an object that has been persistent, but its <literal>Session</literal> has been closed. The reference to the object is still valid, of course, and the detached instance might even be modified in this state. A detached instance can be reattached to a new <literal>Session</literal> at a later point in time, making it (and all the modifications) persistent again. This feature enables a programming model for long running units of work that require user think-time. We call them <emphasis>application transactions</emphasis>, i.e., a unit of work from the point of view of the user."
+msgstr "<emphasis>脱管(Detached)</emphasis> — 与持久(Persistent)对象关联的<literal>Session</literal>被关闭后,对象就变为脱管(Detached)的。对脱管(Detached)对象的引用依然有效,对象可继续被修改。脱管(Detached)对象如果重新关联到某个新的 <literal>Session</literal> 上, 会再次转变为持久(Persistent)的(在Detached其间的改动将被持久化到数据库)。 这个功能使得一种编程模型,即中间会给用户思考时间(user think-time)的长时间运行的操作单元(unit of work)的编程模型成为可能。我们称之为<emphasis>应用程序事务</emphasis>,即从用户观点看是一个操作单元(unit of work)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"We will now discuss the states and state transitions (and the Hibernate "
-"methods that trigger a transition) in more detail."
-msgstr ""
-"接下来我们来细致地讨论下状态(states)及状态间的转换(state transitions)(以"
-"及触发状态转换的 Hibernate 方法)。 "
+msgid "We will now discuss the states and state transitions (and the Hibernate methods that trigger a transition) in more detail."
+msgstr "接下来我们来细致地讨论下状态(states)及状态间的转换(state transitions)(以及触发状态转换的 Hibernate 方法)。 "
#. Tag: title
#, no-c-format
@@ -137,105 +66,39 @@
msgstr "使对象持久化"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Newly instantiated instances of a persistent class are considered "
-"<emphasis>transient</emphasis> by Hibernate. We can make a transient "
-"instance <emphasis>persistent</emphasis> by associating it with a session:"
-msgstr ""
-"Hibernate 认为持久化类(persistent class)新实例化的对象是<emphasis>瞬时"
-"(Transient)</emphasis>的。我们可通过将瞬时(Transient)对象与 session 关联"
-"而把它变为<emphasis>持久(Persistent)</emphasis>的。"
+#, no-c-format
+msgid "Newly instantiated instances of a persistent class are considered <emphasis>transient</emphasis> by Hibernate. We can make a transient instance <emphasis>persistent</emphasis> by associating it with a session:"
+msgstr "Hibernate 认为持久化类(persistent class)新实例化的对象是<emphasis>瞬时(Transient)</emphasis>的。我们可通过将瞬时(Transient)对象与 session 关联而把它变为<emphasis>持久的(Persistent)</emphasis>。"
#. Tag: para
#, no-c-format
-msgid ""
-"If <literal>Cat</literal> has a generated identifier, the identifier is "
-"generated and assigned to the <literal>cat</literal> when <literal>save()</"
-"literal> is called. If <literal>Cat</literal> has an <literal>assigned</"
-"literal> identifier, or a composite key, the identifier should be assigned "
-"to the <literal>cat</literal> instance before calling <literal>save()</"
-"literal>. You can also use <literal>persist()</literal> instead of "
-"<literal>save()</literal>, with the semantics defined in the EJB3 early "
-"draft."
-msgstr ""
-"如果 <literal>Cat</literal> 的持久化标识(identifier)是 <literal>generated</"
-"literal> 类型的, 那么该标识(identifier)会自动在 <literal>save()</literal> "
-"被调用时产生并分配给 <literal>cat</literal>。如果 <literal>Cat</literal> 的持"
-"久化标识(identifier)是<literal>assigned</literal>类型的,或是一个复合主键"
-"(composite key),那么该标识(identifier)应当在调用 <literal>save()</"
-"literal> 之前手动赋予给 <literal>cat</literal>。你也可以按照 EJB3 early "
-"draft 中定义的语义,使用 <literal>persist()</literal> 替代<literal>save()</"
-"literal>。 "
+msgid "If <literal>Cat</literal> has a generated identifier, the identifier is generated and assigned to the <literal>cat</literal> when <literal>save()</literal> is called. If <literal>Cat</literal> has an <literal>assigned</literal> identifier, or a composite key, the identifier should be assigned to the <literal>cat</literal> instance before calling <literal>save()</literal>. You can also use <literal>persist()</literal> instead of <literal>save()</literal>, with the semantics defined in the EJB3 early draft."
+msgstr "如果 <literal>Cat</literal> 的持久化标识(identifier)是 <literal>generated</literal> 类型的, 那么该标识(identifier)会自动在 <literal>save()</literal> 被调用时产生并分配给 <literal>cat</literal>。如果 <literal>Cat</literal> 的持久化标识(identifier)是<literal>assigned</literal>类型的,或是一个复合主键(composite key),那么该标识(identifier)应当在调用 <literal>save()</literal> 之前手动赋予给 <literal>cat</literal>。你也可以按照 EJB3 early draft 中定义的语义,使用 <literal>persist()</literal> 替代<literal>save()</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>persist()</literal> makes a transient instance persistent. However, "
-"it does not guarantee that the identifier value will be assigned to the "
-"persistent instance immediately, the assignment might happen at flush time. "
-"<literal>persist()</literal> also guarantees that it will not execute an "
-"<literal>INSERT</literal> statement if it is called outside of transaction "
-"boundaries. This is useful in long-running conversations with an extended "
-"Session/persistence context."
-msgstr ""
-"<literal>persist()</literal> 使一个临时实例持久化。然而,它不保证立即把标识符"
-"值分配给持久性实例,这会发生在冲刷(flush)的时候。<literal>persist()</"
-"literal> 也保证它在事务边界外调用时不会执行 <literal>INSERT</literal> 语句。"
-"这对于长期运行的带有扩展会话/持久化上下文的会话是很有用的。"
+msgid "<literal>persist()</literal> makes a transient instance persistent. However, it does not guarantee that the identifier value will be assigned to the persistent instance immediately, the assignment might happen at flush time. <literal>persist()</literal> also guarantees that it will not execute an <literal>INSERT</literal> statement if it is called outside of transaction boundaries. This is useful in long-running conversations with an extended Session/persistence context."
+msgstr "<literal>persist()</literal> 使一个临时实例持久化。然而,它不保证立即把标识符值分配给持久性实例,这会发生在冲刷(flush)的时候。<literal>persist()</literal> 也保证它在事务边界外调用时不会执行 <literal>INSERT</literal> 语句。这对于长期运行的带有扩展会话/持久化上下文的会话是很有用的。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>save()</literal> does guarantee to return an identifier. If an "
-"INSERT has to be executed to get the identifier ( e.g. \"identity\" "
-"generator, not \"sequence\"), this INSERT happens immediately, no matter if "
-"you are inside or outside of a transaction. This is problematic in a long-"
-"running conversation with an extended Session/persistence context."
-msgstr ""
-"<literal>save()</literal> 保证返回一个标识符。如果需要运行 INSERT 来获取标识"
-"符(如 \"identity\" 而非 \"sequence\" 生成器),这个 INSERT 将立即执行,不管"
-"你是否在事务内部还是外部。这对于长期运行的带有扩展会话/持久化上下文的会话来说"
-"会出现问题。"
+msgid "<literal>save()</literal> does guarantee to return an identifier. If an INSERT has to be executed to get the identifier ( e.g. \"identity\" generator, not \"sequence\"), this INSERT happens immediately, no matter if you are inside or outside of a transaction. This is problematic in a long-running conversation with an extended Session/persistence context."
+msgstr "<literal>save()</literal> 保证返回一个标识符。如果需要运行 INSERT 来获取标识符(如 \"identity\" 而非 \"sequence\" 生成器),这个 INSERT 将立即执行,不管你是否在事务内部还是外部。这对于长期运行的带有扩展会话/持久化上下文的会话来说会出现问题。"
#. Tag: para
#, no-c-format
-msgid ""
-"Alternatively, you can assign the identifier using an overloaded version of "
-"<literal>save()</literal>."
+msgid "Alternatively, you can assign the identifier using an overloaded version of <literal>save()</literal>."
msgstr "此外,你可以用一个重载版本的 <literal>save()</literal> 方法。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If the object you make persistent has associated objects (e.g. the "
-"<literal>kittens</literal> collection in the previous example), these "
-"objects can be made persistent in any order you like unless you have a "
-"<literal>NOT NULL</literal> constraint upon a foreign key column. There is "
-"never a risk of violating foreign key constraints. However, you might "
-"violate a <literal>NOT NULL</literal> constraint if you <literal>save()</"
-"literal> the objects in the wrong order."
-msgstr ""
-"如果你持久化的对象有关联的对象(associated objects)(例如上例中的 "
-"<literal>kittens</literal> 集合) 那么对这些对象(译注:pk 和 kittens)进行持"
-"久化的顺序是任意的(也就是说可以先对 kittens 进行持久化也可以先对 pk 进行持久"
-"化), 除非你在外键列上有 <literal>NOT NULL</literal> 约束。 Hibernate 不会违"
-"反外键约束,但是如果你用错误的顺序持久化对象(译注:在 pk 持久化之前持久化"
-"kitten),那么可能会违反 <literal>NOT NULL</literal> 约束。 "
+msgid "If the object you make persistent has associated objects (e.g. the <literal>kittens</literal> collection in the previous example), these objects can be made persistent in any order you like unless you have a <literal>NOT NULL</literal> constraint upon a foreign key column. There is never a risk of violating foreign key constraints. However, you might violate a <literal>NOT NULL</literal> constraint if you <literal>save()</literal> the objects in the wrong order."
+msgstr "如果你持久化的对象有关联的对象(associated objects)(例如上例中的 <literal>kittens</literal> 集合) 那么对这些对象(译注:pk 和 kittens)进行持久化的顺序是任意的(也就是说可以先对 kittens 进行持久化也可以先对 pk 进行持久化), 除非你在外键列上有 <literal>NOT NULL</literal> 约束。 Hibernate 不会违反外键约束,但是如果你用错误的顺序持久化对象(译注:在 pk 持久化之前持久化kitten),那么可能会违反 <literal>NOT NULL</literal> 约束。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Usually you do not bother with this detail, as you will normally use "
-"Hibernate's <emphasis>transitive persistence</emphasis> feature to save the "
-"associated objects automatically. Then, even <literal>NOT NULL</literal> "
-"constraint violations do not occur - Hibernate will take care of everything. "
-"Transitive persistence is discussed later in this chapter."
-msgstr ""
-"通常你不会为这些细节烦心,因为你很可能会使用 Hibernate 的<emphasis>传播性持久"
-"化(transitive persistence)</emphasis>功能自动保存相关联那些对象。这样连违"
-"反 <literal>NOT NULL</literal> 约束的情况都不会出现了 — Hibernate 会管好所有"
-"的事情。传播性持久化(transitive persistence)将在本章稍后讨论。"
+msgid "Usually you do not bother with this detail, as you will normally use Hibernate's <emphasis>transitive persistence</emphasis> feature to save the associated objects automatically. Then, even <literal>NOT NULL</literal> constraint violations do not occur - Hibernate will take care of everything. Transitive persistence is discussed later in this chapter."
+msgstr "通常你不会为这些细节烦心,因为你很可能会使用 Hibernate 的<emphasis>传播性持久化(transitive persistence)</emphasis>功能自动保存相关联那些对象。这样连违反 <literal>NOT NULL</literal> 约束的情况都不会出现了 — Hibernate 会管好所有的事情。传播性持久化(transitive persistence)将在本章稍后讨论。"
#. Tag: title
#, no-c-format
@@ -244,100 +107,43 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>load()</literal> methods of <literal>Session</literal> provide "
-"a way of retrieving a persistent instance if you know its identifier. "
-"<literal>load()</literal> takes a class object and loads the state into a "
-"newly instantiated instance of that class in a persistent state."
-msgstr ""
-"如果你知道某个实例的持久化标识(identifier),你就可以使用 <literal>Session</"
-"literal> 的 <literal>load()</literal> 方法来获取它。<literal>load()</"
-"literal> 的另一个参数是指定类的对象。本方法会创建指定类的持久化实例,并从数据"
-"库加载其数据(state)。 "
+msgid "The <literal>load()</literal> methods of <literal>Session</literal> provide a way of retrieving a persistent instance if you know its identifier. <literal>load()</literal> takes a class object and loads the state into a newly instantiated instance of that class in a persistent state."
+msgstr "如果你知道某个实例的持久化标识(identifier),你就可以使用 <literal>Session</literal> 的 <literal>load()</literal> 方法来获取它。<literal>load()</literal> 的另一个参数是指定类的对象。本方法会创建指定类的持久化实例,并从数据库加载其数据(state)。 "
#. Tag: para
#, no-c-format
msgid "Alternatively, you can load state into a given instance:"
-msgstr ""
-"此外,你可以把数据(state)加载到指定的对象实例上(覆盖掉该实例原来的数据)。"
+msgstr "此外,你可以把数据(state)加载到指定的对象实例上(覆盖掉该实例原来的数据)。"
#. Tag: para
#, no-c-format
-msgid ""
-"Be aware that <literal>load()</literal> will throw an unrecoverable "
-"exception if there is no matching database row. If the class is mapped with "
-"a proxy, <literal>load()</literal> just returns an uninitialized proxy and "
-"does not actually hit the database until you invoke a method of the proxy. "
-"This is useful if you wish to create an association to an object without "
-"actually loading it from the database. It also allows multiple instances to "
-"be loaded as a batch if <literal>batch-size</literal> is defined for the "
-"class mapping."
-msgstr ""
-"请注意如果没有匹配的数据库记录,<literal>load()</literal> 方法可能抛出无法恢"
-"复的异常(unrecoverable exception)。如果类的映射使用了代理(proxy),"
-"<literal>load()</literal> 方法会返回一个未初始化的代理,直到你调用该代理的某"
-"方法时才会去访问数据库。 若你希望在某对象中创建一个指向另一个对象的关联,又不"
-"想在从数据库中装载该对象时同时装载相关联的那个对象,那么这种操作方式就用得上"
-"的了。如果为相应类映射关系设置了 <literal>batch-size</literal>,那么使用这种"
-"操作方式允许多个对象被一批装载(因为返回的是代理,无需从数据库中抓取所有对象"
-"的数据)。 "
+msgid "Be aware that <literal>load()</literal> will throw an unrecoverable exception if there is no matching database row. If the class is mapped with a proxy, <literal>load()</literal> just returns an uninitialized proxy and does not actually hit the database until you invoke a method of the proxy. This is useful if you wish to create an association to an object without actually loading it from the database. It also allows multiple instances to be loaded as a batch if <literal>batch-size</literal> is defined for the class mapping."
+msgstr "请注意如果没有匹配的数据库记录,<literal>load()</literal> 方法可能抛出无法恢复的异常(unrecoverable exception)。如果类的映射使用了代理(proxy),<literal>load()</literal> 方法会返回一个未初始化的代理,直到你调用该代理的某方法时才会去访问数据库。 若你希望在某对象中创建一个指向另一个对象的关联,又不想在从数据库中装载该对象时同时装载相关联的那个对象,那么这种操作方式就用得上的了。如果为相应类映射关系设置了 <literal>batch-size</literal>,那么使用这种操作方式允许多个对象被一批装载(因为返回的是代理,无需从数据库中抓取所有对象的数据)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you are not certain that a matching row exists, you should use the "
-"<literal>get()</literal> method which hits the database immediately and "
-"returns null if there is no matching row."
-msgstr ""
-"如果你不确定是否有匹配的行存在,应该使用 <literal>get()</literal> 方法,它会"
-"立刻访问数据库,如果没有对应的记录,会返回 null。 "
+msgid "If you are not certain that a matching row exists, you should use the <literal>get()</literal> method which hits the database immediately and returns null if there is no matching row."
+msgstr "如果你不确定是否有匹配的行存在,应该使用 <literal>get()</literal> 方法,它会立刻访问数据库,如果没有对应的记录,会返回 null。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can even load an object using an SQL <literal>SELECT ... FOR UPDATE</"
-"literal>, using a <literal>LockMode</literal>. See the API documentation for "
-"more information."
-msgstr ""
-"你甚至可以选用某个 <literal>LockMode</literal>,用 SQL 的 "
-"<literal>SELECT ... FOR UPDATE</literal> 装载对象。 请查阅 API 文档以获取更多"
-"信息。 "
+msgid "You can even load an object using an SQL <literal>SELECT ... FOR UPDATE</literal>, using a <literal>LockMode</literal>. See the API documentation for more information."
+msgstr "你甚至可以选用某个 <literal>LockMode</literal>,用 SQL 的 <literal>SELECT ... FOR UPDATE</literal> 装载对象。 请查阅 API 文档以获取更多信息。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Any associated instances or contained collections will <emphasis>not</"
-"emphasis> be selected <literal>FOR UPDATE</literal>, unless you decide to "
-"specify <literal>lock</literal> or <literal>all</literal> as a cascade style "
-"for the association."
-msgstr ""
-"注意,任何关联的对象或者包含的集合都<emphasis>不会</emphasis>被以 "
-"<literal>FOR UPDATE</literal> 方式返回, 除非你指定了 <literal>lock</"
-"literal> 或者 <literal>all</literal> 作为关联(association)的级联风格"
-"(cascade style)。 "
+msgid "Any associated instances or contained collections will <emphasis>not</emphasis> be selected <literal>FOR UPDATE</literal>, unless you decide to specify <literal>lock</literal> or <literal>all</literal> as a cascade style for the association."
+msgstr "注意,任何关联的对象或者包含的集合都<emphasis>不会</emphasis>被以 <literal>FOR UPDATE</literal> 方式返回, 除非你指定了 <literal>lock</literal> 或者 <literal>all</literal> 作为关联(association)的级联风格(cascade style)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"It is possible to re-load an object and all its collections at any time, "
-"using the <literal>refresh()</literal> method. This is useful when database "
-"triggers are used to initialize some of the properties of the object."
-msgstr ""
-"任何时候都可以使用 <literal>refresh()</literal> 方法强迫装载对象和它的集合。"
-"如果你使用数据库触发器功能来处理对象的某些属性,这个方法就很有用了。"
+msgid "It is possible to re-load an object and all its collections at any time, using the <literal>refresh()</literal> method. This is useful when database triggers are used to initialize some of the properties of the object."
+msgstr "任何时候都可以使用 <literal>refresh()</literal> 方法强迫装载对象和它的集合。如果你使用数据库触发器功能来处理对象的某些属性,这个方法就很有用了。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"How much does Hibernate load from the database and how many SQL "
-"<literal>SELECT</literal>s will it use? This depends on the "
-"<emphasis>fetching strategy</emphasis>. This is explained in <xref linkend="
-"\"performance-fetching\" />."
-msgstr ""
-"此处通常会出现一个重要问题: Hibernate 会从数据库中装载多少东西?会执行多少条"
-"相应的 SQL<literal>SELECT</literal> 语句?这取决于<emphasis>抓取策略"
-"(fetching strategy)</emphasis>,我们会在 <xref linkend=\"performance-"
-"fetching\" /> 中解释。 "
+#, no-c-format
+msgid "How much does Hibernate load from the database and how many SQL <literal>SELECT</literal>s will it use? This depends on the <emphasis>fetching strategy</emphasis>. This is explained in <xref linkend=\"performance-fetching\" />."
+msgstr "此处通常会出现一个重要问题: Hibernate 会从数据库中装载多少东西?会执行多少条相应的 SQL<literal>SELECT</literal> 语句?这取决于<emphasis>抓取策略(fetching strategy)</emphasis>,我们会在 <xref linkend=\"performance-fetching\" /> 中解释。 "
#. Tag: title
#, no-c-format
@@ -346,19 +152,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If you do not know the identifiers of the objects you are looking for, you "
-"need a query. Hibernate supports an easy-to-use but powerful object oriented "
-"query language (HQL). For programmatic query creation, Hibernate supports a "
-"sophisticated Criteria and Example query feature (QBC and QBE). You can also "
-"express your query in the native SQL of your database, with optional support "
-"from Hibernate for result set conversion into objects."
-msgstr ""
-"如果不知道所要寻找的对象的持久化标识,那么你需要使用查询。Hibernate 支持强大"
-"且易于使用的面向对象查询语言(HQL)。如果希望通过编程的方式创建查询,"
-"Hibernate 提供了完善的按条件(Query By Criteria,QBC)以及按样例(Query By "
-"Example,QBE)进行查询的功能。你也可以用原生 SQL(native SQL)描述查询,"
-"Hibernate 额外提供了将结果集(result set)转化为对象的支持。 "
+msgid "If you do not know the identifiers of the objects you are looking for, you need a query. Hibernate supports an easy-to-use but powerful object oriented query language (HQL). For programmatic query creation, Hibernate supports a sophisticated Criteria and Example query feature (QBC and QBE). You can also express your query in the native SQL of your database, with optional support from Hibernate for result set conversion into objects."
+msgstr "如果不知道所要寻找的对象的持久化标识,那么你需要使用查询。Hibernate 支持强大且易于使用的面向对象查询语言(HQL)。如果希望通过编程的方式创建查询,Hibernate 提供了完善的按条件(Query By Criteria,QBC)以及按样例(Query By Example,QBE)进行查询的功能。你也可以用原生 SQL(native SQL)描述查询,Hibernate 额外提供了将结果集(result set)转化为对象的支持。 "
#. Tag: title
#, no-c-format
@@ -367,36 +162,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"HQL and native SQL queries are represented with an instance of <literal>org."
-"hibernate.Query</literal>. This interface offers methods for parameter "
-"binding, result set handling, and for the execution of the actual query. You "
-"always obtain a <literal>Query</literal> using the current <literal>Session</"
-"literal>:"
-msgstr ""
-"HQL 和原生 SQL(native SQL)查询要通过为 <literal>org.hibernate.Query</"
-"literal> 的实例来表达。 这个接口提供了参数绑定、结果集处理以及运行实际查询的"
-"方法。你总是可以通过当前 <literal>Session</literal> 获取一个 <literal>Query</"
-"literal> 对象:"
+msgid "HQL and native SQL queries are represented with an instance of <literal>org.hibernate.Query</literal>. This interface offers methods for parameter binding, result set handling, and for the execution of the actual query. You always obtain a <literal>Query</literal> using the current <literal>Session</literal>:"
+msgstr "HQL 和原生 SQL(native SQL)查询要通过为 <literal>org.hibernate.Query</literal> 的实例来表达。 这个接口提供了参数绑定、结果集处理以及运行实际查询的方法。你总是可以通过当前 <literal>Session</literal> 获取一个 <literal>Query</literal> 对象:"
#. Tag: para
#, no-c-format
-msgid ""
-"A query is usually executed by invoking <literal>list()</literal>. The "
-"result of the query will be loaded completely into a collection in memory. "
-"Entity instances retrieved by a query are in a persistent state. The "
-"<literal>uniqueResult()</literal> method offers a shortcut if you know your "
-"query will only return a single object. Queries that make use of eager "
-"fetching of collections usually return duplicates of the root objects, but "
-"with their collections initialized. You can filter these duplicates through "
-"a <literal>Set</literal>."
-msgstr ""
-"一个查询通常在调用 <literal>list()</literal> 时被执行,执行结果会完全装载进内"
-"存中的一个集合(collection)。查询返回的对象处于持久(persistent)状态。如果"
-"你知道的查询只会返回一个对象,可使用 <literal>list()</literal> 的快捷方式 "
-"<literal>uniqueResult()</literal>。注意,使用集合预先抓取的查询往往会返回多次"
-"根对象(他们的集合类都被初始化了)。你可以通过一个<literal>集合(Set)</"
-"literal>来过滤这些重复对象。"
+msgid "A query is usually executed by invoking <literal>list()</literal>. The result of the query will be loaded completely into a collection in memory. Entity instances retrieved by a query are in a persistent state. The <literal>uniqueResult()</literal> method offers a shortcut if you know your query will only return a single object. Queries that make use of eager fetching of collections usually return duplicates of the root objects, but with their collections initialized. You can filter these duplicates through a <literal>Set</literal>."
+msgstr "一个查询通常在调用 <literal>list()</literal> 时被执行,执行结果会完全装载进内存中的一个集合(collection)。查询返回的对象处于持久(persistent)状态。如果你知道的查询只会返回一个对象,可使用 <literal>list()</literal> 的快捷方式 <literal>uniqueResult()</literal>。注意,使用集合预先抓取的查询往往会返回多次根对象(他们的集合类都被初始化了)。你可以通过一个<literal>集合(Set)</literal>来过滤这些重复对象。"
#. Tag: title
#, no-c-format
@@ -405,24 +177,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Occasionally, you might be able to achieve better performance by executing "
-"the query using the <literal>iterate()</literal> method. This will usually "
-"be the case if you expect that the actual entity instances returned by the "
-"query will already be in the session or second-level cache. If they are not "
-"already cached, <literal>iterate()</literal> will be slower than "
-"<literal>list()</literal> and might require many database hits for a simple "
-"query, usually <emphasis>1</emphasis> for the initial select which only "
-"returns identifiers, and <emphasis>n</emphasis> additional selects to "
-"initialize the actual instances."
-msgstr ""
-"某些情况下,你可以使用 <literal>iterate()</literal> 方法得到更好的性能。 这通"
-"常是你预期返回的结果在 session,或二级缓存(second-level cache)中已经存在时"
-"的情况。如若不然,<literal>iterate()</literal> 会比 <literal>list()</"
-"literal> 慢,而且可能简单查询也需要进行多次数据库访问:<literal>iterate()</"
-"literal> 会首先使用 <emphasis>1</emphasis> 条语句得到所有对象的持久化标识"
-"(identifiers),再根据持久化标识执行 <emphasis>n</emphasis> 条附加的 select "
-"语句实例化实际的对象。 "
+msgid "Occasionally, you might be able to achieve better performance by executing the query using the <literal>iterate()</literal> method. This will usually be the case if you expect that the actual entity instances returned by the query will already be in the session or second-level cache. If they are not already cached, <literal>iterate()</literal> will be slower than <literal>list()</literal> and might require many database hits for a simple query, usually <emphasis>1</emphasis> for the initial select which only returns identifiers, and <emphasis>n</emphasis> additional selects to initialize the actual instances."
+msgstr "某些情况下,你可以使用 <literal>iterate()</literal> 方法得到更好的性能。 这通常是你预期返回的结果在 session,或二级缓存(second-level cache)中已经存在时的情况。如若不然,<literal>iterate()</literal> 会比 <literal>list()</literal> 慢,而且可能简单查询也需要进行多次数据库访问:<literal>iterate()</literal> 会首先使用 <emphasis>1</emphasis> 条语句得到所有对象的持久化标识(identifiers),再根据持久化标识执行 <emphasis>n</emphasis> 条附加的 select 语句实例化实际的对象。 "
#. Tag: title
#, no-c-format
@@ -431,12 +187,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate queries sometimes return tuples of objects. Each tuple is returned "
-"as an array:"
-msgstr ""
-"(译注:元组(tuples)指一条结果行包含多个对象) Hibernate 查询有时返回元组"
-"(tuples),每个元组(tuples)以数组的形式返回: "
+msgid "Hibernate queries sometimes return tuples of objects. Each tuple is returned as an array:"
+msgstr "(译注:元组(tuples)指一条结果行包含多个对象) Hibernate 查询有时返回元组(tuples),每个元组(tuples)以数组的形式返回: "
#. Tag: title
#, no-c-format
@@ -445,14 +197,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Queries can specify a property of a class in the <literal>select</literal> "
-"clause. They can even call SQL aggregate functions. Properties or aggregates "
-"are considered \"scalar\" results and not entities in persistent state."
-msgstr ""
-"查询可在 <literal>select</literal> 从句中指定类的属性,甚至可以调用 SQL 统计"
-"(aggregate)函数。属性或统计结果被认定为\"标量(Scalar)\"的结果(而不是持久"
-"(persistent state)的实体)。 "
+msgid "Queries can specify a property of a class in the <literal>select</literal> clause. They can even call SQL aggregate functions. Properties or aggregates are considered \"scalar\" results and not entities in persistent state."
+msgstr "查询可在 <literal>select</literal> 从句中指定类的属性,甚至可以调用 SQL 统计(aggregate)函数。属性或统计结果被认定为\"标量(Scalar)\"的结果(而不是持久(persistent state)的实体)。 "
#. Tag: title
#, no-c-format
@@ -461,23 +207,12 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Methods on <literal>Query</literal> are provided for binding values to named "
-"parameters or JDBC-style <literal>?</literal> parameters. <emphasis>Contrary "
-"to JDBC, Hibernate numbers parameters from zero.</emphasis> Named parameters "
-"are identifiers of the form <literal>:name</literal> in the query string. "
-"The advantages of named parameters are as follows:"
-msgstr ""
-"接口 <literal>Query</literal> 提供了对命名参数(named parameters)、JDBC 风格"
-"的<literal>问号(?)</literal>参数进行绑定的方法。<emphasis>不同于 JDBC,"
-"Hibernate 对参数从 0 开始计数。</emphasis> 命名参数(named parameters)在查询"
-"字符串中是形如 <literal>:name</literal> 的标识符。命名参数(named "
-"parameters)的优点是: "
+msgid "Methods on <literal>Query</literal> are provided for binding values to named parameters or JDBC-style <literal>?</literal> parameters. <emphasis>Contrary to JDBC, Hibernate numbers parameters from zero.</emphasis> Named parameters are identifiers of the form <literal>:name</literal> in the query string. The advantages of named parameters are as follows:"
+msgstr "接口 <literal>Query</literal> 提供了对命名参数(named parameters)、JDBC 风格的<literal>问号(?)</literal>参数进行绑定的方法。<emphasis>不同于 JDBC,Hibernate 对参数从 0 开始计数。</emphasis> 命名参数(named parameters)在查询字符串中是形如 <literal>:name</literal> 的标识符。命名参数(named parameters)的优点是: "
#. Tag: para
#, no-c-format
-msgid ""
-"named parameters are insensitive to the order they occur in the query string"
+msgid "named parameters are insensitive to the order they occur in the query string"
msgstr "命名参数(named parameters)与其在查询串中出现的顺序无关"
#. Tag: para
@@ -497,22 +232,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If you need to specify bounds upon your result set, that is, the maximum "
-"number of rows you want to retrieve and/or the first row you want to "
-"retrieve, you can use methods of the <literal>Query</literal> interface:"
-msgstr ""
-"如果你需要指定结果集的范围(希望返回的最大行数/或开始的行数),应该使用 "
-"<literal>Query</literal> 接口提供的方法: "
+msgid "If you need to specify bounds upon your result set, that is, the maximum number of rows you want to retrieve and/or the first row you want to retrieve, you can use methods of the <literal>Query</literal> interface:"
+msgstr "如果你需要指定结果集的范围(希望返回的最大行数/或开始的行数),应该使用 <literal>Query</literal> 接口提供的方法: "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate knows how to translate this limit query into the native SQL of "
-"your DBMS."
-msgstr ""
-"Hibernate 知道如何将这个有限定条件的查询转换成你的数据库的原生 SQL(native "
-"SQL)。"
+msgid "Hibernate knows how to translate this limit query into the native SQL of your DBMS."
+msgstr "Hibernate 知道如何将这个有限定条件的查询转换成你的数据库的原生 SQL(native SQL)。"
#. Tag: title
#, no-c-format
@@ -521,26 +247,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If your JDBC driver supports scrollable <literal>ResultSet</literal>s, the "
-"<literal>Query</literal> interface can be used to obtain a "
-"<literal>ScrollableResults</literal> object that allows flexible navigation "
-"of the query results."
-msgstr ""
-"如果你的 JDBC 驱动支持可滚动的 <literal>ResuleSet</literal>,<literal>Query</"
-"literal> 接口可以使用 <literal>ScrollableResults</literal>,允许你在查询结果"
-"中灵活游走。 "
+msgid "If your JDBC driver supports scrollable <literal>ResultSet</literal>s, the <literal>Query</literal> interface can be used to obtain a <literal>ScrollableResults</literal> object that allows flexible navigation of the query results."
+msgstr "如果你的 JDBC 驱动支持可滚动的 <literal>ResuleSet</literal>,<literal>Query</literal> 接口可以使用 <literal>ScrollableResults</literal>,允许你在查询结果中灵活游走。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Note that an open database connection and cursor is required for this "
-"functionality. Use <literal>setMaxResult()</literal>/<literal>setFirstResult"
-"()</literal> if you need offline pagination functionality."
-msgstr ""
-"请注意,使用此功能需要保持数据库连接(以及游标(cursor))处于一直打开状态。"
-"如果你需要断开连接使用分页功能,请使用 <literal>setMaxResult()</literal>/"
-"<literal>setFirstResult()</literal>。 "
+msgid "Note that an open database connection and cursor is required for this functionality. Use <literal>setMaxResult()</literal>/<literal>setFirstResult()</literal> if you need offline pagination functionality."
+msgstr "请注意,使用此功能需要保持数据库连接(以及游标(cursor))处于一直打开状态。如果你需要断开连接使用分页功能,请使用 <literal>setMaxResult()</literal>/<literal>setFirstResult()</literal>。 "
#. Tag: title
#, no-c-format
@@ -549,13 +262,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also define named queries in the mapping document. Remember to use a "
-"<literal>CDATA</literal> section if your query contains characters that "
-"could be interpreted as markup."
-msgstr ""
-"你可以在映射文件中定义命名查询(named queries)。如果你的查询串中包含可能被解"
-"释为 XML 标记(markup)的字符,别忘了用<literal>CDATA</literal>包裹起来。"
+msgid "You can also define named queries in the mapping document. Remember to use a <literal>CDATA</literal> section if your query contains characters that could be interpreted as markup."
+msgstr "你可以在映射文件中定义命名查询(named queries)。如果你的查询串中包含可能被解释为 XML 标记(markup)的字符,别忘了用<literal>CDATA</literal>包裹起来。"
#. Tag: para
#, no-c-format
@@ -564,28 +272,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The actual program code is independent of the query language that is used. "
-"You can also define native SQL queries in metadata, or migrate existing "
-"queries to Hibernate by placing them in mapping files."
-msgstr ""
-"请注意实际的程序代码与所用的查询语言无关,你也可在元数据中定义原生 SQL"
-"(native SQL)查询,或将原有的其他的查询语句放在配置文件中,这样就可以让 "
-"Hibernate 统一管理,达到迁移的目的。 "
+msgid "The actual program code is independent of the query language that is used. You can also define native SQL queries in metadata, or migrate existing queries to Hibernate by placing them in mapping files."
+msgstr "请注意实际的程序代码与所用的查询语言无关,你也可在元数据中定义原生 SQL(native SQL)查询,或将原有的其他的查询语句放在配置文件中,这样就可以让 Hibernate 统一管理,达到迁移的目的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Also note that a query declaration inside a <literal><hibernate-"
-"mapping></literal> element requires a global unique name for the query, "
-"while a query declaration inside a <literal><class></literal> element "
-"is made unique automatically by prepending the fully qualified name of the "
-"class. For example <literal>eg.Cat.ByNameAndMaximumWeight</literal>."
-msgstr ""
-"也请注意在 <literal><hibernate-mapping></literal> 元素中声明的查询必须"
-"有一个全局唯一的名字,而在 <literal><class></literal> 元素中声明的查询自"
-"动具有全局名,是通过类的全名加以限定的。比如 <literal>eg.Cat."
-"ByNameAndMaximumWeight</literal>。 "
+msgid "Also note that a query declaration inside a <literal><hibernate-mapping></literal> element requires a global unique name for the query, while a query declaration inside a <literal><class></literal> element is made unique automatically by prepending the fully qualified name of the class. For example <literal>eg.Cat.ByNameAndMaximumWeight</literal>."
+msgstr "也请注意在 <literal><hibernate-mapping></literal> 元素中声明的查询必须有一个全局唯一的名字,而在 <literal><class></literal> 元素中声明的查询自动具有全局名,是通过类的全名加以限定的。比如 <literal>eg.Cat.ByNameAndMaximumWeight</literal>。 "
#. Tag: title
#, no-c-format
@@ -594,43 +287,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A collection <emphasis>filter</emphasis> is a special type of query that can "
-"be applied to a persistent collection or array. The query string can refer "
-"to <literal>this</literal>, meaning the current collection element."
-msgstr ""
-"集合<emphasis>过滤器(filter)</emphasis>是一种用于一个持久化集合或者数组的特"
-"殊的查询。查询字符串中可以使用 <literal>\"this\"</literal> 来引用集合中的当前"
-"元素。 "
+msgid "A collection <emphasis>filter</emphasis> is a special type of query that can be applied to a persistent collection or array. The query string can refer to <literal>this</literal>, meaning the current collection element."
+msgstr "集合<emphasis>过滤器(filter)</emphasis>是一种用于一个持久化集合或者数组的特殊的查询。查询字符串中可以使用 <literal>\"this\"</literal> 来引用集合中的当前元素。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The returned collection is considered a bag that is a copy of the given "
-"collection. The original collection is not modified. This is contrary to the "
-"implication of the name \"filter\", but consistent with expected behavior."
-msgstr ""
-"返回的集合可以被认为是一个包(bag,无顺序可重复的集合(collection)),它是所"
-"给集合的副本。 原来的集合不会被改动(这与“过滤器(filter)”的隐含的含义不符,"
-"不过与我们期待的行为一致)。 "
+msgid "The returned collection is considered a bag that is a copy of the given collection. The original collection is not modified. This is contrary to the implication of the name \"filter\", but consistent with expected behavior."
+msgstr "返回的集合可以被认为是一个包(bag,无顺序可重复的集合(collection)),它是所给集合的副本。 原来的集合不会被改动(这与“过滤器(filter)”的隐含的含义不符,不过与我们期待的行为一致)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Observe that filters do not require a <literal>from</literal> clause, "
-"although they can have one if required. Filters are not limited to returning "
-"the collection elements themselves."
-msgstr ""
-"请注意过滤器(filter)并不需要 <literal>from</literal> 子句(当然需要的话它们"
-"也可以加上)。过滤器(filter)不限定于只能返回集合元素本身。 "
+msgid "Observe that filters do not require a <literal>from</literal> clause, although they can have one if required. Filters are not limited to returning the collection elements themselves."
+msgstr "请注意过滤器(filter)并不需要 <literal>from</literal> 子句(当然需要的话它们也可以加上)。过滤器(filter)不限定于只能返回集合元素本身。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Even an empty filter query is useful, e.g. to load a subset of elements in a "
-"large collection:"
-msgstr ""
-"即使无条件的过滤器(filter)也是有意义的。例如,用于加载一个大集合的子集: "
+msgid "Even an empty filter query is useful, e.g. to load a subset of elements in a large collection:"
+msgstr "即使无条件的过滤器(filter)也是有意义的。例如,用于加载一个大集合的子集: "
#. Tag: title
#, no-c-format
@@ -639,25 +312,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"HQL is extremely powerful, but some developers prefer to build queries "
-"dynamically using an object-oriented API, rather than building query "
-"strings. Hibernate provides an intuitive <literal>Criteria</literal> query "
-"API for these cases:"
-msgstr ""
-"HQL 极为强大,但是有些人希望能够动态的使用一种面向对象 API 创建查询,而非在他"
-"们的 Java 代码中嵌入字符串。对于那部分人来说,Hibernate 提供了直观的 "
-"<literal>Criteria</literal> 查询 API。 "
+msgid "HQL is extremely powerful, but some developers prefer to build queries dynamically using an object-oriented API, rather than building query strings. Hibernate provides an intuitive <literal>Criteria</literal> query API for these cases:"
+msgstr "HQL 极为强大,但是有些人希望能够动态的使用一种面向对象 API 创建查询,而非在他们的 Java 代码中嵌入字符串。对于那部分人来说,Hibernate 提供了直观的 <literal>Criteria</literal> 查询 API。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The <literal>Criteria</literal> and the associated <literal>Example</"
-"literal> API are discussed in more detail in <xref linkend=\"querycriteria"
-"\" />."
-msgstr ""
-"<literal>Criteria</literal> 以及相关的<literal>样例(Example)</literal>API "
-"将会在 <xref linkend=\"querycriteria\"/> 中详细讨论。 "
+#, no-c-format
+msgid "The <literal>Criteria</literal> and the associated <literal>Example</literal> API are discussed in more detail in <xref linkend=\"querycriteria\" />."
+msgstr "<literal>Criteria</literal> 以及相关的<literal>样例(Example)</literal>API 将会在 <xref linkend=\"querycriteria\"/> 中详细讨论。"
#. Tag: title
#, no-c-format
@@ -666,28 +327,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can express a query in SQL, using <literal>createSQLQuery()</literal> "
-"and let Hibernate manage the mapping from result sets to objects. You can at "
-"any time call <literal>session.connection()</literal> and use the JDBC "
-"<literal>Connection</literal> directly. If you choose to use the Hibernate "
-"API, you must enclose SQL aliases in braces:"
-msgstr ""
-"你可以使用 <literal>createSQLQuery()</literal> 方法,用 SQL 来描述查询,并由 "
-"Hibernate 将结果集转换成对象。请注意,你可以在任何时候调用 <literal>session."
-"connection()</literal> 来获得并使用 JDBC <literal>Connection</literal> 对"
-"象。 如果你选择使用 Hibernate 的 API,你必须把 SQL 别名用大括号包围起来: "
+msgid "You can express a query in SQL, using <literal>createSQLQuery()</literal> and let Hibernate manage the mapping from result sets to objects. You can at any time call <literal>session.connection()</literal> and use the JDBC <literal>Connection</literal> directly. If you choose to use the Hibernate API, you must enclose SQL aliases in braces:"
+msgstr "你可以使用 <literal>createSQLQuery()</literal> 方法,用 SQL 来描述查询,并由 Hibernate 将结果集转换成对象。请注意,你可以在任何时候调用 <literal>session.connection()</literal> 来获得并使用 JDBC <literal>Connection</literal> 对象。 如果你选择使用 Hibernate 的 API,你必须把 SQL 别名用大括号包围起来: "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"SQL queries can contain named and positional parameters, just like Hibernate "
-"queries. More information about native SQL queries in Hibernate can be found "
-"in <xref linkend=\"querysql\" />."
-msgstr ""
-"和 Hibernate 查询一样,SQL 查询也可以包含命名参数和占位参数。可以在 <xref "
-"linkend=\"querysql\"/> 找到更多关于 Hibernate 中原生 SQL(native SQL)的信"
-"息。 "
+#, no-c-format
+msgid "SQL queries can contain named and positional parameters, just like Hibernate queries. More information about native SQL queries in Hibernate can be found in <xref linkend=\"querysql\" />."
+msgstr "和 Hibernate 查询一样,SQL 查询也可以包含命名参数和占位参数。可以在 <xref linkend=\"querysql\"/> 找到更多关于 Hibernate 中原生 SQL(native SQL)的信息。"
#. Tag: title
#, no-c-format
@@ -696,61 +342,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Transactional persistent instances</emphasis> (i.e. objects "
-"loaded, saved, created or queried by the <literal>Session</literal>) can be "
-"manipulated by the application, and any changes to persistent state will be "
-"persisted when the <literal>Session</literal> is <emphasis>flushed</"
-"emphasis>. This is discussed later in this chapter. There is no need to call "
-"a particular method (like <literal>update()</literal>, which has a different "
-"purpose) to make your modifications persistent. The most straightforward way "
-"to update the state of an object is to <literal>load()</literal> it and then "
-"manipulate it directly while the <literal>Session</literal> is open:"
-msgstr ""
-"<emphasis>事务中的持久实例</emphasis>(就是通过 <literal>session</literal> 装"
-"载、保存、创建或者查询出的对象) 被应用程序操作所造成的任何修改都会在 "
-"<literal>Session</literal> 被<emphasis>刷出(flushed)</emphasis>的时候被持久"
-"化(本章后面会详细讨论)。这里不需要调用某个特定的方法(比如 <literal>update"
-"()</literal>,设计它的目的是不同的)将你的修改持久化。所以最直接的更新一个对"
-"象的方法就是在 <literal>Session</literal> 处于打开状态时 <literal>load()</"
-"literal> 它,然后直接修改即可: "
+msgid "<emphasis>Transactional persistent instances</emphasis> (i.e. objects loaded, saved, created or queried by the <literal>Session</literal>) can be manipulated by the application, and any changes to persistent state will be persisted when the <literal>Session</literal> is <emphasis>flushed</emphasis>. This is discussed later in this chapter. There is no need to call a particular method (like <literal>update()</literal>, which has a different purpose) to make your modifications persistent. The most straightforward way to update the state of an object is to <literal>load()</literal> it and then manipulate it directly while the <literal>Session</literal> is open:"
+msgstr "<emphasis>事务中的持久实例</emphasis>(就是通过 <literal>session</literal> 装载、保存、创建或者查询出的对象) 被应用程序操作所造成的任何修改都会在 <literal>Session</literal> 被<emphasis>刷出(flushed)</emphasis>的时候被持久化(本章后面会详细讨论)。这里不需要调用某个特定的方法(比如 <literal>update()</literal>,设计它的目的是不同的)将你的修改持久化。所以最直接的更新一个对象的方法就是在 <literal>Session</literal> 处于打开状态时 <literal>load()</literal> 它,然后直接修改即可: "
#. Tag: para
#, no-c-format
-msgid ""
-"Sometimes this programming model is inefficient, as it requires in the same "
-"session both an SQL <literal>SELECT</literal> to load an object and an SQL "
-"<literal>UPDATE</literal> to persist its updated state. Hibernate offers an "
-"alternate approach by using detached instances."
-msgstr ""
-"有时这种程序模型效率低下,因为它在同一 Session 里需要一条 SQL "
-"<literal>SELECT</literal> 语句(用于加载对象) 以及一条 SQL <literal>UPDATE</"
-"literal> 语句(持久化更新的状态)。为此 Hibernate 提供了另一种途径,使用脱管"
-"(detached)实例。 "
+msgid "Sometimes this programming model is inefficient, as it requires in the same session both an SQL <literal>SELECT</literal> to load an object and an SQL <literal>UPDATE</literal> to persist its updated state. Hibernate offers an alternate approach by using detached instances."
+msgstr "有时这种程序模型效率低下,因为它在同一 Session 里需要一条 SQL <literal>SELECT</literal> 语句(用于加载对象) 以及一条 SQL <literal>UPDATE</literal> 语句(持久化更新的状态)。为此 Hibernate 提供了另一种途径,使用脱管(detached)实例。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Hibernate does not offer its own API for direct execution of "
-"<literal>UPDATE</literal> or <literal>DELETE</literal> statements. Hibernate "
-"is a <emphasis>state management</emphasis> service, you do not have to think "
-"in <emphasis>statements</emphasis> to use it. JDBC is a perfect API for "
-"executing SQL statements, you can get a JDBC <literal>Connection</literal> "
-"at any time by calling <literal>session.connection()</literal>. Furthermore, "
-"the notion of mass operations conflicts with object/relational mapping for "
-"online transaction processing-oriented applications. Future versions of "
-"Hibernate can, however, provide special mass operation functions. See <xref "
-"linkend=\"batch\" /> for some possible batch operation tricks."
-msgstr ""
-"请注意 Hibernate 本身不提供直接执行 <literal>UPDATE</literal> 或 "
-"<literal>DELETE</literal> 语句的 API。Hibernate 提供的是 <emphasis>state "
-"management</emphasis> 服务,你不必考虑要使用的 <emphasis>statements</"
-"emphasis>。JDBC 是出色的执行 SQL 语句的 API,任何时候调用 <literal>session."
-"connection()</literal> 你都可以得到一个 <literal>Connection</literal> 对象。 "
-"此外,在联机事务处理(OLTP)程序中,大量操作(mass operations)与对象/关系映"
-"射的观点是相冲突的。Hibernate 的将来版本可能会提供专门的进行大量操作(mass "
-"operation)的功能。参考 <xref linkend=\"batch\"/>,寻找一些可用的批量"
-"(batch)操作技巧。 "
+#, no-c-format
+msgid "Hibernate does not offer its own API for direct execution of <literal>UPDATE</literal> or <literal>DELETE</literal> statements. Hibernate is a <emphasis>state management</emphasis> service, you do not have to think in <emphasis>statements</emphasis> to use it. JDBC is a perfect API for executing SQL statements, you can get a JDBC <literal>Connection</literal> at any time by calling <literal>session.connection()</literal>. Furthermore, the notion of mass operations conflicts with object/relational mapping for online transaction processing-oriented applications. Future versions of Hibernate can, however, provide special mass operation functions. See <xref linkend=\"batch\" /> for some possible batch operation tricks."
+msgstr "请注意 Hibernate 本身不提供直接执行 <literal>UPDATE</literal> 或 <literal>DELETE</literal> 语句的 API。Hibernate 提供的是 <emphasis>state management</emphasis> 服务,你不必考虑要使用的 <emphasis>statements</emphasis>。JDBC 是出色的执行 SQL 语句的 API,任何时候调用 <literal>session.connection()</literal> 你都可以得到一个 <literal>Connection</literal> 对象。 此外,在联机事务处理(OLTP)程序中,大量操作(mass operations)与对象/关系映射的观点是相冲突的。Hibernate 的将来版本可能会提供专门的进行大量操作(mass operation)的功能。参考 <xref linkend=\"batch\"/>,寻找一些可用的批量(batch)操作技巧。"
#. Tag: title
#, no-c-format
@@ -759,100 +362,43 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Many applications need to retrieve an object in one transaction, send it to "
-"the UI layer for manipulation, then save the changes in a new transaction. "
-"Applications that use this kind of approach in a high-concurrency "
-"environment usually use versioned data to ensure isolation for the \"long\" "
-"unit of work."
-msgstr ""
-"很多程序需要在某个事务中获取对象,然后将对象发送到界面层去操作,最后在一个新"
-"的事务保存所做的修改。在高并发访问的环境中使用这种方式,通常使用附带版本信息"
-"的数据来保证这些“长“工作单元之间的隔离。"
+msgid "Many applications need to retrieve an object in one transaction, send it to the UI layer for manipulation, then save the changes in a new transaction. Applications that use this kind of approach in a high-concurrency environment usually use versioned data to ensure isolation for the \"long\" unit of work."
+msgstr "很多程序需要在某个事务中获取对象,然后将对象发送到界面层去操作,最后在一个新的事务保存所做的修改。在高并发访问的环境中使用这种方式,通常使用附带版本信息的数据来保证这些“长“工作单元之间的隔离。"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate supports this model by providing for reattachment of detached "
-"instances using the <literal>Session.update()</literal> or <literal>Session."
-"merge()</literal> methods:"
-msgstr ""
-"Hibernate 通过提供 <literal>Session.update()</literal> 或 <literal>Session."
-"merge()</literal> 重新关联脱管实例的办法来支持这种模型。"
+msgid "Hibernate supports this model by providing for reattachment of detached instances using the <literal>Session.update()</literal> or <literal>Session.merge()</literal> methods:"
+msgstr "Hibernate 通过提供 <literal>Session.update()</literal> 或 <literal>Session.merge()</literal> 重新关联脱管实例的办法来支持这种模型。"
#. Tag: para
#, no-c-format
-msgid ""
-"If the <literal>Cat</literal> with identifier <literal>catId</literal> had "
-"already been loaded by <literal>secondSession</literal> when the application "
-"tried to reattach it, an exception would have been thrown."
-msgstr ""
-"如果具有 <literal>catId</literal> 持久化标识的 <literal>Cat</literal> 之前已"
-"经被<literal>另一Session(secondSession)</literal>装载了, 应用程序进行重关"
-"联操作(reattach)的时候会抛出一个异常。"
+msgid "If the <literal>Cat</literal> with identifier <literal>catId</literal> had already been loaded by <literal>secondSession</literal> when the application tried to reattach it, an exception would have been thrown."
+msgstr "如果具有 <literal>catId</literal> 持久化标识的 <literal>Cat</literal> 之前已经被<literal>另一Session(secondSession)</literal>装载了, 应用程序进行重关联操作(reattach)的时候会抛出一个异常。"
#. Tag: para
#, no-c-format
-msgid ""
-"Use <literal>update()</literal> if you are certain that the session does not "
-"contain an already persistent instance with the same identifier. Use "
-"<literal>merge()</literal> if you want to merge your modifications at any "
-"time without consideration of the state of the session. In other words, "
-"<literal>update()</literal> is usually the first method you would call in a "
-"fresh session, ensuring that the reattachment of your detached instances is "
-"the first operation that is executed."
-msgstr ""
-"如果你确定当前 session 没有包含与之具有相同持久化标识的持久实例,使用 "
-"<literal>update()</literal>。如果想随时合并你的的改动而不考虑 session 的状"
-"态,使用 <literal>merge()</literal>。换句话说,在一个新 session 中通常第一个"
-"调用的是 <literal>update()</literal> 方法,以便保证重新关联脱管(detached)对"
-"象的操作首先被执行。 "
+msgid "Use <literal>update()</literal> if you are certain that the session does not contain an already persistent instance with the same identifier. Use <literal>merge()</literal> if you want to merge your modifications at any time without consideration of the state of the session. In other words, <literal>update()</literal> is usually the first method you would call in a fresh session, ensuring that the reattachment of your detached instances is the first operation that is executed."
+msgstr "如果你确定当前 session 没有包含与之具有相同持久化标识的持久实例,使用 <literal>update()</literal>。如果想随时合并你的的改动而不考虑 session 的状态,使用 <literal>merge()</literal>。换句话说,在一个新 session 中通常第一个调用的是 <literal>update()</literal> 方法,以便保证重新关联脱管(detached)对象的操作首先被执行。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The application should individually <literal>update()</literal> detached "
-"instances that are reachable from the given detached instance "
-"<emphasis>only</emphasis> if it wants their state to be updated. This can be "
-"automated using <emphasis>transitive persistence</emphasis>. See <xref "
-"linkend=\"objectstate-transitive\" /> for more information."
-msgstr ""
-"如果希望相关联的脱管对象(通过引用“可到达”的脱管对象)的数据也要更新到数据库"
-"时(并且也<emphasis>仅仅</emphasis>在这种情况),可以对该相关联的脱管对象单独"
-"调用 <literal>update()</literal> 当然这些可以自动完成,即通过使用<emphasis>传"
-"播性持久化(transitive persistence)</emphasis>,请看 <xref linkend="
-"\"objectstate-transitive\"/>。 "
+#, no-c-format
+msgid "The application should individually <literal>update()</literal> detached instances that are reachable from the given detached instance <emphasis>only</emphasis> if it wants their state to be updated. This can be automated using <emphasis>transitive persistence</emphasis>. See <xref linkend=\"objectstate-transitive\" /> for more information."
+msgstr "如果希望相关联的脱管对象(通过引用“可到达”的脱管对象)的数据也要更新到数据库时(并且也<emphasis>仅仅</emphasis>在这种情况),可以对该相关联的脱管对象单独调用 <literal>update()</literal> 当然这些可以自动完成,即通过使用<emphasis>传播性持久化(transitive persistence)</emphasis>,请看 <xref linkend=\"objectstate-transitive\"/>。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>lock()</literal> method also allows an application to "
-"reassociate an object with a new session. However, the detached instance has "
-"to be unmodified."
-msgstr ""
-"<literal>lock()</literal> 方法也允许程序重新关联某个对象到一个新 session 上。"
-"不过,该脱管(detached)的对象必须是没有修改过的。"
+msgid "The <literal>lock()</literal> method also allows an application to reassociate an object with a new session. However, the detached instance has to be unmodified."
+msgstr "<literal>lock()</literal> 方法也允许程序重新关联某个对象到一个新 session 上。不过,该脱管(detached)的对象必须是没有修改过的。"
#. Tag: para
#, no-c-format
-msgid ""
-"Note that <literal>lock()</literal> can be used with various "
-"<literal>LockMode</literal>s. See the API documentation and the chapter on "
-"transaction handling for more information. Reattachment is not the only "
-"usecase for <literal>lock()</literal>."
-msgstr ""
-"请注意,<literal>lock()</literal> 可以搭配多种 <literal>LockMode</literal>,"
-"更多信息请阅读 API 文档以及关于事务处理(transaction handling)的章节。重新关"
-"联不是 <literal>lock()</literal> 的唯一用途。 "
+msgid "Note that <literal>lock()</literal> can be used with various <literal>LockMode</literal>s. See the API documentation and the chapter on transaction handling for more information. Reattachment is not the only usecase for <literal>lock()</literal>."
+msgstr "请注意,<literal>lock()</literal> 可以搭配多种 <literal>LockMode</literal>,更多信息请阅读 API 文档以及关于事务处理(transaction handling)的章节。重新关联不是 <literal>lock()</literal> 的唯一用途。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Other models for long units of work are discussed in <xref linkend="
-"\"transactions-optimistic\" />."
-msgstr ""
-"其他用于长时间工作单元的模型会在 <xref linkend=\"transactions-optimistic\"/> "
-"中讨论。 "
+#, no-c-format
+msgid "Other models for long units of work are discussed in <xref linkend=\"transactions-optimistic\" />."
+msgstr "其他用于长时间工作单元的模型会在 <xref linkend=\"transactions-optimistic\"/> 中讨论。"
#. Tag: title
#, no-c-format
@@ -861,39 +407,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate users have requested a general purpose method that either saves a "
-"transient instance by generating a new identifier or updates/reattaches the "
-"detached instances associated with its current identifier. The "
-"<literal>saveOrUpdate()</literal> method implements this functionality."
-msgstr ""
-"Hibernate 的用户曾要求一个既可自动分配新持久化标识(identifier)保存瞬时"
-"(transient)对象,又可更新/重新关联脱管(detached)实例的通用方法。"
-"<literal>saveOrUpdate()</literal> 方法实现了这个功能。"
+msgid "Hibernate users have requested a general purpose method that either saves a transient instance by generating a new identifier or updates/reattaches the detached instances associated with its current identifier. The <literal>saveOrUpdate()</literal> method implements this functionality."
+msgstr "Hibernate 的用户曾要求一个既可自动分配新持久化标识(identifier)保存瞬时(transient)对象,又可更新/重新关联脱管(detached)实例的通用方法。<literal>saveOrUpdate()</literal> 方法实现了这个功能。"
#. Tag: para
#, no-c-format
-msgid ""
-"The usage and semantics of <literal>saveOrUpdate()</literal> seems to be "
-"confusing for new users. Firstly, so long as you are not trying to use "
-"instances from one session in another new session, you should not need to "
-"use <literal>update()</literal>, <literal>saveOrUpdate()</literal>, or "
-"<literal>merge()</literal>. Some whole applications will never use either of "
-"these methods."
-msgstr ""
-"<literal>saveOrUpdate()</literal> 用途和语义可能会使新用户感到迷惑。首先,只"
-"要你没有尝试在某个 session 中使用来自另一 session 的实例,你就应该不需要使"
-"用 <literal>update()</literal>, <literal>saveOrUpdate()</literal>,或 "
-"<literal>merge()</literal>。有些程序从来不用这些方法。"
+msgid "The usage and semantics of <literal>saveOrUpdate()</literal> seems to be confusing for new users. Firstly, so long as you are not trying to use instances from one session in another new session, you should not need to use <literal>update()</literal>, <literal>saveOrUpdate()</literal>, or <literal>merge()</literal>. Some whole applications will never use either of these methods."
+msgstr "<literal>saveOrUpdate()</literal> 用途和语义可能会使新用户感到迷惑。首先,只要你没有尝试在某个 session 中使用来自另一 session 的实例,你就应该不需要使用 <literal>update()</literal>, <literal>saveOrUpdate()</literal>,或 <literal>merge()</literal>。有些程序从来不用这些方法。"
#. Tag: para
#, no-c-format
-msgid ""
-"Usually <literal>update()</literal> or <literal>saveOrUpdate()</literal> are "
-"used in the following scenario:"
-msgstr ""
-"通常下面的场景会使用 <literal>update()</literal> 或 <literal>saveOrUpdate()</"
-"literal>:"
+msgid "Usually <literal>update()</literal> or <literal>saveOrUpdate()</literal> are used in the following scenario:"
+msgstr "通常下面的场景会使用 <literal>update()</literal> 或 <literal>saveOrUpdate()</literal>:"
#. Tag: para
#, no-c-format
@@ -917,9 +442,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"the application persists these modifications by calling <literal>update()</"
-"literal> in a second session"
+msgid "the application persists these modifications by calling <literal>update()</literal> in a second session"
msgstr "程序调用第二个 session 的 <literal>update()</literal> 方法持久这些改动"
#. Tag: para
@@ -934,39 +457,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"if another object associated with the session has the same identifier, throw "
-"an exception"
-msgstr ""
-"如果另一个与本 session 关联的对象拥有相同的持久化标识(identifier),抛出一个"
-"异常"
+msgid "if another object associated with the session has the same identifier, throw an exception"
+msgstr "如果另一个与本 session 关联的对象拥有相同的持久化标识(identifier),抛出一个异常"
#. Tag: para
#, no-c-format
msgid "if the object has no identifier property, <literal>save()</literal> it"
-msgstr ""
-"如果对象没有持久化标识(identifier)属性,对其调用 <literal>save()</literal>"
+msgstr "如果对象没有持久化标识(identifier)属性,对其调用 <literal>save()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"if the object's identifier has the value assigned to a newly instantiated "
-"object, <literal>save()</literal> it"
-msgstr ""
-"如果对象的持久标识(identifier)表明其是一个新实例化的对象,对其调用 "
-"<literal>save()</literal>。"
+msgid "if the object's identifier has the value assigned to a newly instantiated object, <literal>save()</literal> it"
+msgstr "如果对象的持久标识(identifier)表明其是一个新实例化的对象,对其调用 <literal>save()</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"if the object is versioned by a <literal><version></literal> or "
-"<literal><timestamp></literal>, and the version property value is the "
-"same value assigned to a newly instantiated object, <literal>save()</"
-"literal> it"
-msgstr ""
-"如果对象是附带版本信息的(通过 <literal><version></literal> 或 "
-"<literal><timestamp></literal>)并且版本属性的值表明其是一个新实例化的"
-"对象,<literal>save()</literal> 它。 "
+msgid "if the object is versioned by a <literal><version></literal> or <literal><timestamp></literal>, and the version property value is the same value assigned to a newly instantiated object, <literal>save()</literal> it"
+msgstr "如果对象是附带版本信息的(通过 <literal><version></literal> 或 <literal><timestamp></literal>)并且版本属性的值表明其是一个新实例化的对象,<literal>save()</literal> 它。 "
#. Tag: para
#, no-c-format
@@ -980,21 +487,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"if there is a persistent instance with the same identifier currently "
-"associated with the session, copy the state of the given object onto the "
-"persistent instance"
-msgstr ""
-"如果 session 中存在相同持久化标识(identifier)的实例,用用户给出的对象的状态"
-"覆盖旧有的持久实例"
+msgid "if there is a persistent instance with the same identifier currently associated with the session, copy the state of the given object onto the persistent instance"
+msgstr "如果 session 中存在相同持久化标识(identifier)的实例,用用户给出的对象的状态覆盖旧有的持久实例"
#. Tag: para
#, no-c-format
-msgid ""
-"if there is no persistent instance currently associated with the session, "
-"try to load it from the database, or create a new persistent instance"
-msgstr ""
-"如果 session 没有相应的持久实例,则尝试从数据库中加载,或创建新的持久化实例"
+msgid "if there is no persistent instance currently associated with the session, try to load it from the database, or create a new persistent instance"
+msgstr "如果 session 没有相应的持久实例,则尝试从数据库中加载,或创建新的持久化实例"
#. Tag: para
#, no-c-format
@@ -1003,9 +502,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"the given instance does not become associated with the session, it remains "
-"detached"
+msgid "the given instance does not become associated with the session, it remains detached"
msgstr "用户给出的这个对象没有被关联到 session 上,它依旧是脱管的"
#. Tag: title
@@ -1015,28 +512,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>Session.delete()</literal> will remove an object's state from the "
-"database. Your application, however, can still hold a reference to a deleted "
-"object. It is best to think of <literal>delete()</literal> as making a "
-"persistent instance, transient."
-msgstr ""
-"使用 <literal>Session.delete()</literal> 会把对象的状态从数据库中移除。当然,"
-"你的应用程序可能仍然持有一个指向已删除对象的引用。所以,最好这样理解:"
-"<literal>delete()</literal> 的用途是把一个持久实例变成瞬时(transient)实"
-"例。 "
+msgid "<literal>Session.delete()</literal> will remove an object's state from the database. Your application, however, can still hold a reference to a deleted object. It is best to think of <literal>delete()</literal> as making a persistent instance, transient."
+msgstr "使用 <literal>Session.delete()</literal> 会把对象的状态从数据库中移除。当然,你的应用程序可能仍然持有一个指向已删除对象的引用。所以,最好这样理解:<literal>delete()</literal> 的用途是把一个持久实例变成瞬时(transient)实例。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can delete objects in any order, without risk of foreign key constraint "
-"violations. It is still possible to violate a <literal>NOT NULL</literal> "
-"constraint on a foreign key column by deleting objects in the wrong order, e."
-"g. if you delete the parent, but forget to delete the children."
-msgstr ""
-"你可以用你喜欢的任何顺序删除对象,不用担心外键约束冲突。当然,如果你搞错了顺"
-"序,还是有可能引发在外键字段定义的 <literal>NOT NULL</literal> 约束冲突。例如"
-"你删除了父对象,但是忘记删除其子对象。"
+msgid "You can delete objects in any order, without risk of foreign key constraint violations. It is still possible to violate a <literal>NOT NULL</literal> constraint on a foreign key column by deleting objects in the wrong order, e.g. if you delete the parent, but forget to delete the children."
+msgstr "你可以用你喜欢的任何顺序删除对象,不用担心外键约束冲突。当然,如果你搞错了顺序,还是有可能引发在外键字段定义的 <literal>NOT NULL</literal> 约束冲突。例如你删除了父对象,但是忘记删除其子对象。"
#. Tag: title
#, no-c-format
@@ -1045,71 +527,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is sometimes useful to be able to take a graph of persistent instances "
-"and make them persistent in a different datastore, without regenerating "
-"identifier values."
-msgstr ""
-"偶尔会用到不重新生成持久化标识(identifier),将持久实例以及其关联的实例持久"
-"到不同的数据库中的操作。 "
+msgid "It is sometimes useful to be able to take a graph of persistent instances and make them persistent in a different datastore, without regenerating identifier values."
+msgstr "偶尔会用到不重新生成持久化标识(identifier),将持久实例以及其关联的实例持久到不同的数据库中的操作。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>ReplicationMode</literal> determines how <literal>replicate()</"
-"literal> will deal with conflicts with existing rows in the database:"
-msgstr ""
-"<literal>ReplicationMode</literal> 决定在和数据库中已存在记录由冲突时,"
-"<literal>replicate()</literal> 如何处理。 "
+msgid "The <literal>ReplicationMode</literal> determines how <literal>replicate()</literal> will deal with conflicts with existing rows in the database:"
+msgstr "<literal>ReplicationMode</literal> 决定在和数据库中已存在记录由冲突时,<literal>replicate()</literal> 如何处理。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ReplicationMode.IGNORE</literal>: ignores the object when there is "
-"an existing database row with the same identifier"
-msgstr ""
-"<literal>ReplicationMode.IGNORE</literal>:当某个现有数据库记录具有相同标识符"
-"时忽略它"
+msgid "<literal>ReplicationMode.IGNORE</literal>: ignores the object when there is an existing database row with the same identifier"
+msgstr "<literal>ReplicationMode.IGNORE</literal>:当某个现有数据库记录具有相同标识符时忽略它"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ReplicationMode.OVERWRITE</literal>: overwrites any existing "
-"database row with the same identifier"
-msgstr ""
-"<literal>ReplicationMode.OVERWRITE</literal>:用相同的标识符覆盖现有数据库记"
-"录"
+msgid "<literal>ReplicationMode.OVERWRITE</literal>: overwrites any existing database row with the same identifier"
+msgstr "<literal>ReplicationMode.OVERWRITE</literal>:用相同的标识符覆盖现有数据库记录"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ReplicationMode.EXCEPTION</literal>: throws an exception if there "
-"is an existing database row with the same identifier"
-msgstr ""
-"<literal>ReplicationMode.EXCEPTION</literal>:当某个现有数据库记录具有相同标"
-"识符时抛出异常"
+msgid "<literal>ReplicationMode.EXCEPTION</literal>: throws an exception if there is an existing database row with the same identifier"
+msgstr "<literal>ReplicationMode.EXCEPTION</literal>:当某个现有数据库记录具有相同标识符时抛出异常"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ReplicationMode.LATEST_VERSION</literal>: overwrites the row if its "
-"version number is earlier than the version number of the object, or ignore "
-"the object otherwise"
-msgstr ""
-"<literal>ReplicationMode.LATEST_VERSION</literal>:如果当前的版本较新,则覆"
-"盖,否则忽略"
+msgid "<literal>ReplicationMode.LATEST_VERSION</literal>: overwrites the row if its version number is earlier than the version number of the object, or ignore the object otherwise"
+msgstr "<literal>ReplicationMode.LATEST_VERSION</literal>:如果当前的版本较新,则覆盖,否则忽略"
#. Tag: para
#, no-c-format
-msgid ""
-"Usecases for this feature include reconciling data entered into different "
-"database instances, upgrading system configuration information during "
-"product upgrades, rolling back changes made during non-ACID transactions and "
-"more."
-msgstr ""
-"这个功能的用途包括使录入的数据在不同数据库中一致,产品升级时升级系统配置信"
-"息,回滚 non-ACID 事务中的修改等等。(译注,non-ACID,非 ACID;ACID,Atomic,"
-"Consistent,Isolated and Durable 的缩写)"
+msgid "Usecases for this feature include reconciling data entered into different database instances, upgrading system configuration information during product upgrades, rolling back changes made during non-ACID transactions and more."
+msgstr "这个功能的用途包括使录入的数据在不同数据库中一致,产品升级时升级系统配置信息,回滚 non-ACID 事务中的修改等等。(译注,non-ACID,非 ACID;ACID,Atomic,Consistent,Isolated and Durable 的缩写)"
#. Tag: title
#, no-c-format
@@ -1118,15 +567,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Sometimes the <literal>Session</literal> will execute the SQL statements "
-"needed to synchronize the JDBC connection's state with the state of objects "
-"held in memory. This process, called <emphasis>flush</emphasis>, occurs by "
-"default at the following points:"
-msgstr ""
-"每间隔一段时间,<literal>Session</literal> 会执行一些必需的 SQL 语句来把内存"
-"中的对象的状态同步到 JDBC 连接中。这个过程被称为<emphasis>刷出(flush)</"
-"emphasis>,默认会在下面的时间点执行: "
+msgid "Sometimes the <literal>Session</literal> will execute the SQL statements needed to synchronize the JDBC connection's state with the state of objects held in memory. This process, called <emphasis>flush</emphasis>, occurs by default at the following points:"
+msgstr "每间隔一段时间,<literal>Session</literal> 会执行一些必需的 SQL 语句来把内存中的对象的状态同步到 JDBC 连接中。这个过程被称为<emphasis>刷出(flush)</emphasis>,默认会在下面的时间点执行: "
#. Tag: para
#, no-c-format
@@ -1150,12 +592,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"all entity insertions in the same order the corresponding objects were saved "
-"using <literal>Session.save()</literal>"
-msgstr ""
-"所有对实体进行插入的语句,其顺序按照对象执行 <literal>Session.save()</"
-"literal> 的时间顺序 "
+msgid "all entity insertions in the same order the corresponding objects were saved using <literal>Session.save()</literal>"
+msgstr "所有对实体进行插入的语句,其顺序按照对象执行 <literal>Session.save()</literal> 的时间顺序 "
#. Tag: para
#, no-c-format
@@ -1179,67 +617,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"all entity deletions in the same order the corresponding objects were "
-"deleted using <literal>Session.delete()</literal>"
-msgstr ""
-"所有对实体进行删除的语句,其顺序按照对象执行 <literal>Session.delete()</"
-"literal> 的时间顺序 "
+msgid "all entity deletions in the same order the corresponding objects were deleted using <literal>Session.delete()</literal>"
+msgstr "所有对实体进行删除的语句,其顺序按照对象执行 <literal>Session.delete()</literal> 的时间顺序 "
#. Tag: para
#, no-c-format
-msgid ""
-"An exception is that objects using <literal>native</literal> ID generation "
-"are inserted when they are saved."
-msgstr ""
-"有一个例外是,如果对象使用 <literal>native</literal> 方式来生成 ID(持久化标"
-"识)的话,它们一执行 save 就会被插入。"
+msgid "An exception is that objects using <literal>native</literal> ID generation are inserted when they are saved."
+msgstr "有一个例外是,如果对象使用 <literal>native</literal> 方式来生成 ID(持久化标识)的话,它们一执行 save 就会被插入。"
#. Tag: para
#, no-c-format
-msgid ""
-"Except when you explicitly <literal>flush()</literal>, there are absolutely "
-"no guarantees about <emphasis>when</emphasis> the <literal>Session</literal> "
-"executes the JDBC calls, only the <emphasis>order</emphasis> in which they "
-"are executed. However, Hibernate does guarantee that the <literal>Query.list"
-"(..)</literal> will never return stale or incorrect data."
-msgstr ""
-"除非你明确地发出了 <literal>flush()</literal> 指令,关于 Session<emphasis> 何"
-"时</emphasis>会执行这些 JDBC 调用是完全无法保证的,只能保证它们执行的前后顺"
-"序。当然,Hibernate 保证,<literal>Query.list(..)</literal> 绝对不会返回已经"
-"失效的数据,也不会返回错误数据。 "
+msgid "Except when you explicitly <literal>flush()</literal>, there are absolutely no guarantees about <emphasis>when</emphasis> the <literal>Session</literal> executes the JDBC calls, only the <emphasis>order</emphasis> in which they are executed. However, Hibernate does guarantee that the <literal>Query.list(..)</literal> will never return stale or incorrect data."
+msgstr "除非你明确地发出了 <literal>flush()</literal> 指令,关于 Session<emphasis> 何时</emphasis>会执行这些 JDBC 调用是完全无法保证的,只能保证它们执行的前后顺序。当然,Hibernate 保证,<literal>Query.list(..)</literal> 绝对不会返回已经失效的数据,也不会返回错误数据。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"It is possible to change the default behavior so that flush occurs less "
-"frequently. The <literal>FlushMode</literal> class defines three different "
-"modes: only flush at commit time when the Hibernate <literal>Transaction</"
-"literal> API is used, flush automatically using the explained routine, or "
-"never flush unless <literal>flush()</literal> is called explicitly. The last "
-"mode is useful for long running units of work, where a <literal>Session</"
-"literal> is kept open and disconnected for a long time (see <xref linkend="
-"\"transactions-optimistic-longsession\" />)."
-msgstr ""
-"也可以改变默认的设置,来让刷出(flush)操作发生的不那么频繁。"
-"<literal>FlushMode</literal> 类定义了三种不同的方式。仅在提交时刷出(仅当 "
-"Hibernate 的 <literal>Transaction</literal> API 被使用时有效),按照刚才说的"
-"方式刷出,以及除非明确使用 <literal>flush()</literal> 否则从不刷出。 最后一种"
-"模式对于那些需要长时间保持 <literal>Session</literal> 为打开或者断线状态的长"
-"时间运行的工作单元很有用。(参见 <xref linkend=\"transactions-optimistic-"
-"longsession\"/>)。 "
+#, no-c-format
+msgid "It is possible to change the default behavior so that flush occurs less frequently. The <literal>FlushMode</literal> class defines three different modes: only flush at commit time when the Hibernate <literal>Transaction</literal> API is used, flush automatically using the explained routine, or never flush unless <literal>flush()</literal> is called explicitly. The last mode is useful for long running units of work, where a <literal>Session</literal> is kept open and disconnected for a long time (see <xref linkend=\"transactions-optimistic-longsession\" />)."
+msgstr "也可以改变默认的设置,来让刷出(flush)操作发生的不那么频繁。<literal>FlushMode</literal> 类定义了三种不同的方式。仅在提交时刷出(仅当 Hibernate 的 <literal>Transaction</literal> API 被使用时有效),按照刚才说的方式刷出,以及除非明确使用 <literal>flush()</literal> 否则从不刷出。 最后一种模式对于那些需要长时间保持 <literal>Session</literal> 为打开或者断线状态的长时间运行的工作单元很有用。(参见 <xref linkend=\"transactions-optimistic-longsession\"/>)。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"During flush, an exception might occur (e.g. if a DML operation violates a "
-"constraint). Since handling exceptions involves some understanding of "
-"Hibernate's transactional behavior, we discuss it in <xref linkend="
-"\"transactions\" />."
-msgstr ""
-"刷出(flush)期间,可能会抛出异常(例如一个 DML 操作违反了约束)。异常处理涉"
-"及到对 Hibernate 事务性行为的理解,因此我们将在 <xref linkend=\"transactions"
-"\"/> 中讨论。"
+#, no-c-format
+msgid "During flush, an exception might occur (e.g. if a DML operation violates a constraint). Since handling exceptions involves some understanding of Hibernate's transactional behavior, we discuss it in <xref linkend=\"transactions\" />."
+msgstr "刷出(flush)期间,可能会抛出异常(例如一个 DML 操作违反了约束)。异常处理涉及到对 Hibernate 事务性行为的理解,因此我们将在 <xref linkend=\"transactions\"/> 中讨论。 "
#. Tag: title
#, no-c-format
@@ -1248,67 +647,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is quite cumbersome to save, delete, or reattach individual objects, "
-"especially if you deal with a graph of associated objects. A common case is "
-"a parent/child relationship. Consider the following example:"
-msgstr ""
-"对每一个对象都要执行保存,删除或重关联操作让人感觉有点麻烦,尤其是在处理许多"
-"彼此关联的对象的时候。一个常见的例子是父子关系。考虑下面的例子:"
+msgid "It is quite cumbersome to save, delete, or reattach individual objects, especially if you deal with a graph of associated objects. A common case is a parent/child relationship. Consider the following example:"
+msgstr "对每一个对象都要执行保存,删除或重关联操作让人感觉有点麻烦,尤其是在处理许多彼此关联的对象的时候。一个常见的例子是父子关系。考虑下面的例子:"
#. Tag: para
#, no-c-format
-msgid ""
-"If the children in a parent/child relationship would be value typed (e.g. a "
-"collection of addresses or strings), their life cycle would depend on the "
-"parent and no further action would be required for convenient \"cascading\" "
-"of state changes. When the parent is saved, the value-typed child objects "
-"are saved and when the parent is deleted, the children will be deleted, etc. "
-"This works for operations such as the removal of a child from the "
-"collection. Since value-typed objects cannot have shared references, "
-"Hibernate will detect this and delete the child from the database."
-msgstr ""
-"如果一个父子关系中的子对象是值类型(value typed)(例如,地址或字符串的集合)"
-"的,他们的生命周期会依赖于父对象,可以享受方便的级联操作(Cascading),不需要"
-"额外的动作。父对象被保存时,这些值类型(value typed)子对象也将被保存;父对象"
-"被删除时,子对象也将被删除。这对将一个子对象从集合中移除是同样有效:"
-"Hibernate 会检测到,并且因为值类型(value typed)的对象不可能被其他对象引用,"
-"所以 Hibernate 会在数据库中删除这个子对象。 "
+msgid "If the children in a parent/child relationship would be value typed (e.g. a collection of addresses or strings), their life cycle would depend on the parent and no further action would be required for convenient \"cascading\" of state changes. When the parent is saved, the value-typed child objects are saved and when the parent is deleted, the children will be deleted, etc. This works for operations such as the removal of a child from the collection. Since value-typed objects cannot have shared references, Hibernate will detect this and delete the child from the database."
+msgstr "如果一个父子关系中的子对象是值类型(value typed)(例如,地址或字符串的集合)的,他们的生命周期会依赖于父对象,可以享受方便的级联操作(Cascading),不需要额外的动作。父对象被保存时,这些值类型(value typed)子对象也将被保存;父对象被删除时,子对象也将被删除。这对将一个子对象从集合中移除是同样有效:Hibernate 会检测到,并且因为值类型(value typed)的对象不可能被其他对象引用,所以 Hibernate 会在数据库中删除这个子对象。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Now consider the same scenario with parent and child objects being entities, "
-"not value-types (e.g. categories and items, or parent and child cats). "
-"Entities have their own life cycle and support shared references. Removing "
-"an entity from the collection does not mean it can be deleted), and there is "
-"by default no cascading of state from one entity to any other associated "
-"entities. Hibernate does not implement <emphasis>persistence by "
-"reachability</emphasis> by default."
-msgstr ""
-"现在考虑同样的场景,不过父子对象都是实体(entities)类型,而非值类型(value "
-"typed)(例如,类别与个体,或母猫和小猫)。实体有自己的生命期,允许共享对其的"
-"引用(因此从集合中移除一个实体,不意味着它可以被删除),并且实体到其他关联实"
-"体之间默认没有级联操作的设置。 Hibernate 默认不实现所谓的<emphasis>可到达即持"
-"久化(persistence by reachability)</emphasis>的策略。 "
+msgid "Now consider the same scenario with parent and child objects being entities, not value-types (e.g. categories and items, or parent and child cats). Entities have their own life cycle and support shared references. Removing an entity from the collection does not mean it can be deleted), and there is by default no cascading of state from one entity to any other associated entities. Hibernate does not implement <emphasis>persistence by reachability</emphasis> by default."
+msgstr "现在考虑同样的场景,不过父子对象都是实体(entities)类型,而非值类型(value typed)(例如,类别与个体,或母猫和小猫)。实体有自己的生命期,允许共享对其的引用(因此从集合中移除一个实体,不意味着它可以被删除),并且实体到其他关联实体之间默认没有级联操作的设置。 Hibernate 默认不实现所谓的<emphasis>可到达即持久化(persistence by reachability)</emphasis>的策略。 "
#. Tag: para
#, no-c-format
-msgid ""
-"For each basic operation of the Hibernate session - including "
-"<literal>persist(), merge(), saveOrUpdate(), delete(), lock(), refresh(), "
-"evict(), replicate()</literal> - there is a corresponding cascade style. "
-"Respectively, the cascade styles are named <literal>create, merge, save-"
-"update, delete, lock, refresh, evict, replicate</literal>. If you want an "
-"operation to be cascaded along an association, you must indicate that in the "
-"mapping document. For example:"
-msgstr ""
-"每个 Hibernate session 的基本操作 — 包括 <literal>persist(), merge(), "
-"saveOrUpdate(), delete(), lock(), refresh(), evict(), replicate()</literal> "
-"— 都有对应的级联风格(cascade style)。这些级联风格(cascade style)风格分别"
-"命名为 <literal>create, merge, save-update, delete, lock, refresh, evict, "
-"replicate</literal>。如果你希望一个操作被顺着关联关系级联传播,你必须在映射文"
-"件中指出这一点。例如:"
+msgid "For each basic operation of the Hibernate session - including <literal>persist(), merge(), saveOrUpdate(), delete(), lock(), refresh(), evict(), replicate()</literal> - there is a corresponding cascade style. Respectively, the cascade styles are named <literal>create, merge, save-update, delete, lock, refresh, evict, replicate</literal>. If you want an operation to be cascaded along an association, you must indicate that in the mapping document. For example:"
+msgstr "每个 Hibernate session 的基本操作 — 包括 <literal>persist(), merge(), saveOrUpdate(), delete(), lock(), refresh(), evict(), replicate()</literal> — 都有对应的级联风格(cascade style)。这些级联风格(cascade style)风格分别命名为 <literal>create, merge, save-update, delete, lock, refresh, evict, replicate</literal>。如果你希望一个操作被顺着关联关系级联传播,你必须在映射文件中指出这一点。例如:"
#. Tag: para
#, no-c-format
@@ -1317,27 +672,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can even use <literal>cascade=\"all\"</literal> to specify that "
-"<emphasis>all</emphasis> operations should be cascaded along the "
-"association. The default <literal>cascade=\"none\"</literal> specifies that "
-"no operations are to be cascaded."
-msgstr ""
-"你可以使用 <literal>cascade=\"all\"</literal> 来指定<emphasis>全部</emphasis>"
-"操作都顺着关联关系级联(cascaded)。默认值是 <literal>cascade=\"none\"</"
-"literal>,即任何操作都不会被级联(cascaded)。 "
+msgid "You can even use <literal>cascade=\"all\"</literal> to specify that <emphasis>all</emphasis> operations should be cascaded along the association. The default <literal>cascade=\"none\"</literal> specifies that no operations are to be cascaded."
+msgstr "你可以使用 <literal>cascade=\"all\"</literal> 来指定<emphasis>全部</emphasis>操作都顺着关联关系级联(cascaded)。默认值是 <literal>cascade=\"none\"</literal>,即任何操作都不会被级联(cascaded)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A special cascade style, <literal>delete-orphan</literal>, applies only to "
-"one-to-many associations, and indicates that the <literal>delete()</literal> "
-"operation should be applied to any child object that is removed from the "
-"association."
-msgstr ""
-"注意有一个特殊的级联风格(cascade style) <literal>delete-orphan</literal>,"
-"只应用于 one-to-many 关联,表明 <literal>delete()</literal> 操作应该被应用于"
-"所有从关联中删除的对象。"
+msgid "A special cascade style, <literal>delete-orphan</literal>, applies only to one-to-many associations, and indicates that the <literal>delete()</literal> operation should be applied to any child object that is removed from the association."
+msgstr "注意有一个特殊的级联风格(cascade style) <literal>delete-orphan</literal>,只应用于 one-to-many 关联,表明 <literal>delete()</literal> 操作应该被应用于所有从关联中删除的对象。"
#. Tag: para
#, no-c-format
@@ -1346,144 +687,63 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It does not usually make sense to enable cascade on a <literal><many-to-"
-"one></literal> or <literal><many-to-many></literal> association. "
-"Cascade is often useful for <literal><one-to-one></literal> and "
-"<literal><one-to-many></literal> associations."
-msgstr ""
-"通常在 <literal><many-to-one></literal> 或 <literal><many-to-"
-"many></literal> 关系中应用级联(cascade)没什么意义。级联(cascade)通常"
-"在 <literal><one-to-one></literal> 和 <literal><one-to-many></"
-"literal> 关系中比较有用。 "
+msgid "It does not usually make sense to enable cascade on a <literal><many-to-one></literal> or <literal><many-to-many></literal> association. Cascade is often useful for <literal><one-to-one></literal> and <literal><one-to-many></literal> associations."
+msgstr "通常在 <literal><many-to-one></literal> 或 <literal><many-to-many></literal> 关系中应用级联(cascade)没什么意义。级联(cascade)通常在 <literal><one-to-one></literal> 和 <literal><one-to-many></literal> 关系中比较有用。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If the child object's lifespan is bounded by the lifespan of the parent "
-"object, make it a <emphasis>life cycle object</emphasis> by specifying "
-"<literal>cascade=\"all,delete-orphan\"</literal>."
-msgstr ""
-"如果子对象的寿命限定在父亲对象的寿命之内,可通过指定 <literal>cascade=\"all,"
-"delete-orphan\"</literal> 将其变为<emphasis>自动生命周期管理的对象"
-"(lifecycle object)</emphasis>。 "
+msgid "If the child object's lifespan is bounded by the lifespan of the parent object, make it a <emphasis>life cycle object</emphasis> by specifying <literal>cascade=\"all,delete-orphan\"</literal>."
+msgstr "如果子对象的寿命限定在父亲对象的寿命之内,可通过指定 <literal>cascade=\"all,delete-orphan\"</literal> 将其变为<emphasis>自动生命周期管理的对象(lifecycle object)</emphasis>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Otherwise, you might not need cascade at all. But if you think that you will "
-"often be working with the parent and children together in the same "
-"transaction, and you want to save yourself some typing, consider using "
-"<literal>cascade=\"persist,merge,save-update\"</literal>."
-msgstr ""
-"其他情况,你可根本不需要级联(cascade)。但是如果你认为你会经常在某个事务中同"
-"时用到父对象与子对象,并且你希望少打点儿字,可以考虑使用 <literal>cascade="
-"\"persist,merge,save-update\"</literal>。"
+msgid "Otherwise, you might not need cascade at all. But if you think that you will often be working with the parent and children together in the same transaction, and you want to save yourself some typing, consider using <literal>cascade=\"persist,merge,save-update\"</literal>."
+msgstr "其他情况,你可根本不需要级联(cascade)。但是如果你认为你会经常在某个事务中同时用到父对象与子对象,并且你希望少打点儿字,可以考虑使用 <literal>cascade=\"persist,merge,save-update\"</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"Mapping an association (either a single valued association, or a collection) "
-"with <literal>cascade=\"all\"</literal> marks the association as a "
-"<emphasis>parent/child</emphasis> style relationship where save/update/"
-"delete of the parent results in save/update/delete of the child or children."
-msgstr ""
-"可以使用 <literal>cascade=\"all\"</literal> 将一个关联关系(无论是对值对象的"
-"关联,或者对一个集合的关联)标记为<emphasis>父/子</emphasis>关系的关联。 这样"
-"对父对象进行 save/update/delete 操作就会导致子对象也进行 save/update/delete "
-"操作。"
+msgid "Mapping an association (either a single valued association, or a collection) with <literal>cascade=\"all\"</literal> marks the association as a <emphasis>parent/child</emphasis> style relationship where save/update/delete of the parent results in save/update/delete of the child or children."
+msgstr "可以使用 <literal>cascade=\"all\"</literal> 将一个关联关系(无论是对值对象的关联,或者对一个集合的关联)标记为<emphasis>父/子</emphasis>关系的关联。 这样对父对象进行 save/update/delete 操作就会导致子对象也进行 save/update/delete 操作。"
#. Tag: para
#, no-c-format
-msgid ""
-"Furthermore, a mere reference to a child from a persistent parent will "
-"result in save/update of the child. This metaphor is incomplete, however. A "
-"child which becomes unreferenced by its parent is <emphasis>not</emphasis> "
-"automatically deleted, except in the case of a <literal><one-to-many></"
-"literal> association mapped with <literal>cascade=\"delete-orphan\"</"
-"literal>. The precise semantics of cascading operations for a parent/child "
-"relationship are as follows:"
-msgstr ""
-"此外,一个持久的父对象对子对象的浅引用(mere reference)会导致子对象被同步 "
-"save/update。不过,这个隐喻(metaphor)的说法并不完整。除非关联是 "
-"<literal><one-to-many></literal> 关联并且被标记为 <literal>cascade="
-"\"delete-orphan\"</literal>,否则父对象失去对某个子对象的引用<emphasis>不会</"
-"emphasis>导致该子对象被自动删除。父子关系的级联(cascading)操作准确语义如"
-"下: "
+msgid "Furthermore, a mere reference to a child from a persistent parent will result in save/update of the child. This metaphor is incomplete, however. A child which becomes unreferenced by its parent is <emphasis>not</emphasis> automatically deleted, except in the case of a <literal><one-to-many></literal> association mapped with <literal>cascade=\"delete-orphan\"</literal>. The precise semantics of cascading operations for a parent/child relationship are as follows:"
+msgstr "此外,一个持久的父对象对子对象的浅引用(mere reference)会导致子对象被同步 save/update。不过,这个隐喻(metaphor)的说法并不完整。除非关联是 <literal><one-to-many></literal> 关联并且被标记为 <literal>cascade=\"delete-orphan\"</literal>,否则父对象失去对某个子对象的引用<emphasis>不会</emphasis>导致该子对象被自动删除。父子关系的级联(cascading)操作准确语义如下: "
#. Tag: para
#, no-c-format
-msgid ""
-"If a parent is passed to <literal>persist()</literal>, all children are "
-"passed to <literal>persist()</literal>"
-msgstr ""
-"如果父对象被 <literal>persist()</literal>,那么所有子对象也会被 "
-"<literal>persist()</literal>"
+msgid "If a parent is passed to <literal>persist()</literal>, all children are passed to <literal>persist()</literal>"
+msgstr "如果父对象被 <literal>persist()</literal>,那么所有子对象也会被 <literal>persist()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"If a parent is passed to <literal>merge()</literal>, all children are passed "
-"to <literal>merge()</literal>"
-msgstr ""
-"如果父对象被 <literal>merge()</literal>,那么所有子对象也会被 <literal>merge"
-"()</literal>"
+msgid "If a parent is passed to <literal>merge()</literal>, all children are passed to <literal>merge()</literal>"
+msgstr "如果父对象被 <literal>merge()</literal>,那么所有子对象也会被 <literal>merge()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"If a parent is passed to <literal>save()</literal>, <literal>update()</"
-"literal> or <literal>saveOrUpdate()</literal>, all children are passed to "
-"<literal>saveOrUpdate()</literal>"
-msgstr ""
-"如果父对象被 <literal>save()</literal>,<literal>update()</literal> 或 "
-"<literal>saveOrUpdate()</literal>,那么所有子对象则会被 <literal>saveOrUpdate"
-"()</literal>"
+msgid "If a parent is passed to <literal>save()</literal>, <literal>update()</literal> or <literal>saveOrUpdate()</literal>, all children are passed to <literal>saveOrUpdate()</literal>"
+msgstr "如果父对象被 <literal>save()</literal>,<literal>update()</literal> 或 <literal>saveOrUpdate()</literal>,那么所有子对象则会被 <literal>saveOrUpdate()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"If a transient or detached child becomes referenced by a persistent parent, "
-"it is passed to <literal>saveOrUpdate()</literal>"
-msgstr ""
-"如果某个持久的父对象引用了瞬时(transient)或者脱管(detached)的子对象,那么"
-"子对象将会被 <literal>saveOrUpdate()</literal>"
+msgid "If a transient or detached child becomes referenced by a persistent parent, it is passed to <literal>saveOrUpdate()</literal>"
+msgstr "如果某个持久的父对象引用了瞬时(transient)或者脱管(detached)的子对象,那么子对象将会被 <literal>saveOrUpdate()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"If a parent is deleted, all children are passed to <literal>delete()</"
-"literal>"
+msgid "If a parent is deleted, all children are passed to <literal>delete()</literal>"
msgstr "如果父对象被删除,那么所有子对象也会被 <literal>delete()</literal>"
#. Tag: para
#, no-c-format
-msgid ""
-"If a child is dereferenced by a persistent parent, <emphasis>nothing special "
-"happens</emphasis> - the application should explicitly delete the child if "
-"necessary - unless <literal>cascade=\"delete-orphan\"</literal>, in which "
-"case the \"orphaned\" child is deleted."
-msgstr ""
-"除非被标记为 <literal>cascade=\"delete-orphan\"</literal>(删除“孤儿”模式,此"
-"时不被任何一个父对象引用的子对象会被删除),否则子对象失掉父对象对其的引用"
-"时,<emphasis>什么事也不会发生</emphasis>。如果有特殊需要,应用程序可通过显式"
-"调用 delete() 删除子对象。"
+msgid "If a child is dereferenced by a persistent parent, <emphasis>nothing special happens</emphasis> - the application should explicitly delete the child if necessary - unless <literal>cascade=\"delete-orphan\"</literal>, in which case the \"orphaned\" child is deleted."
+msgstr "除非被标记为 <literal>cascade=\"delete-orphan\"</literal>(删除“孤儿”模式,此时不被任何一个父对象引用的子对象会被删除),否则子对象失掉父对象对其的引用时,<emphasis>什么事也不会发生</emphasis>。如果有特殊需要,应用程序可通过显式调用 delete() 删除子对象。"
#. Tag: para
#, no-c-format
-msgid ""
-"Finally, note that cascading of operations can be applied to an object graph "
-"at <emphasis>call time</emphasis> or at <emphasis>flush time</emphasis>. All "
-"operations, if enabled, are cascaded to associated entities reachable when "
-"the operation is executed. However, <literal>save-update</literal> and "
-"<literal>delete-orphan</literal> are transitive for all associated entities "
-"reachable during flush of the <literal>Session</literal>."
-msgstr ""
-"最后,注意操作的级联可能是在<emphasis>调用期(call time)</emphasis>或者"
-"<emphasis>写入期(flush time)</emphasis>作用到对象图上的。所有的操作,如果允"
-"许,都在操作被执行的时候级联到可触及的关联实体上。然而,<literal>save-upate</"
-"literal> 和 <literal>delete-orphan</literal> 是在<literal>Session</literal> "
-"flush 的时候才作用到所有可触及的被关联对象上的。 "
+msgid "Finally, note that cascading of operations can be applied to an object graph at <emphasis>call time</emphasis> or at <emphasis>flush time</emphasis>. All operations, if enabled, are cascaded to associated entities reachable when the operation is executed. However, <literal>save-update</literal> and <literal>delete-orphan</literal> are transitive for all associated entities reachable during flush of the <literal>Session</literal>."
+msgstr "最后,注意操作的级联可能是在<emphasis>调用期(call time)</emphasis>或者<emphasis>写入期(flush time)</emphasis>作用到对象图上的。所有的操作,如果允许,都在操作被执行的时候级联到可触及的关联实体上。然而,<literal>save-upate</literal> 和 <literal>delete-orphan</literal> 是在<literal>Session</literal> flush 的时候才作用到所有可触及的被关联对象上的。 "
#. Tag: title
#, no-c-format
@@ -1492,32 +752,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate requires a rich meta-level model of all entity and value types. "
-"This model can be useful to the application itself. For example, the "
-"application might use Hibernate's metadata to implement a \"smart\" deep-"
-"copy algorithm that understands which objects should be copied (eg. mutable "
-"value types) and which objects that should not (e.g. immutable value types "
-"and, possibly, associated entities)."
-msgstr ""
-"Hibernate 中有一个非常丰富的元级别(meta-level)的模型,含有所有的实体和值类"
-"型数据的元数据。 有时这个模型对应用程序本身也会非常有用。比如说,应用程序可能"
-"在实现一种“智能”的深度拷贝算法时,通过使用 Hibernate 的元数据来了解哪些对象应"
-"该被拷贝(比如,可变的值类型数据),那些不应该(不可变的值类型数据,也许还有"
-"某些被关联的实体)。 "
+msgid "Hibernate requires a rich meta-level model of all entity and value types. This model can be useful to the application itself. For example, the application might use Hibernate's metadata to implement a \"smart\" deep-copy algorithm that understands which objects should be copied (eg. mutable value types) and which objects that should not (e.g. immutable value types and, possibly, associated entities)."
+msgstr "Hibernate 中有一个非常丰富的元级别(meta-level)的模型,含有所有的实体和值类型数据的元数据。 有时这个模型对应用程序本身也会非常有用。比如说,应用程序可能在实现一种“智能”的深度拷贝算法时,通过使用 Hibernate 的元数据来了解哪些对象应该被拷贝(比如,可变的值类型数据),那些不应该(不可变的值类型数据,也许还有某些被关联的实体)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate exposes metadata via the <literal>ClassMetadata</literal> and "
-"<literal>CollectionMetadata</literal> interfaces and the <literal>Type</"
-"literal> hierarchy. Instances of the metadata interfaces can be obtained "
-"from the <literal>SessionFactory</literal>."
-msgstr ""
-"Hibernate 提供了 <literal>ClassMetadata</literal> 接口,"
-"<literal>CollectionMetadata</literal> 接口和 <literal>Type</literal> 层次体系"
-"来访问元数据。可以通过 <literal>SessionFactory</literal> 获取元数据接口的实"
-"例。 "
+msgid "Hibernate exposes metadata via the <literal>ClassMetadata</literal> and <literal>CollectionMetadata</literal> interfaces and the <literal>Type</literal> hierarchy. Instances of the metadata interfaces can be obtained from the <literal>SessionFactory</literal>."
+msgstr "Hibernate 提供了 <literal>ClassMetadata</literal> 接口,<literal>CollectionMetadata</literal> 接口和 <literal>Type</literal> 层次体系来访问元数据。可以通过 <literal>SessionFactory</literal> 获取元数据接口的实例。 "
#~ msgid ""
#~ "<![CDATA[DomesticCat fritz = new DomesticCat();\n"
@@ -1531,7 +772,6 @@
#~ "fritz.setSex('M');\n"
#~ "fritz.setName(\"Fritz\");\n"
#~ "Long generatedId = (Long) sess.save(fritz);]]>"
-
#~ msgid ""
#~ "<![CDATA[DomesticCat pk = new DomesticCat();\n"
#~ "pk.setColor(Color.TABBY);\n"
@@ -1548,10 +788,8 @@
#~ "pk.setKittens( new HashSet() );\n"
#~ "pk.addKitten(fritz);\n"
#~ "sess.save( pk, new Long(1234) );]]>"
-
#~ msgid "<![CDATA[Cat fritz = (Cat) sess.load(Cat.class, generatedId);]]>"
#~ msgstr "<![CDATA[Cat fritz = (Cat) sess.load(Cat.class, generatedId);]]>"
-
#~ msgid ""
#~ "<![CDATA[// you need to wrap primitive identifiers\n"
#~ "long id = 1234;\n"
@@ -1562,7 +800,6 @@
#~ "long id = 1234;\n"
#~ "DomesticCat pk = (DomesticCat) sess.load( DomesticCat.class, new Long"
#~ "(id) );]]>"
-
#~ msgid ""
#~ "<![CDATA[Cat cat = new DomesticCat();\n"
#~ "// load pk's state into cat\n"
@@ -1573,7 +810,6 @@
#~ "// load pk's state into cat\n"
#~ "sess.load( cat, new Long(pkId) );\n"
#~ "Set kittens = cat.getKittens();]]>"
-
#~ msgid ""
#~ "<![CDATA[Cat cat = (Cat) sess.get(Cat.class, id);\n"
#~ "if (cat==null) {\n"
@@ -1588,12 +824,10 @@
#~ " sess.save(cat, id);\n"
#~ "}\n"
#~ "return cat;]]>"
-
#~ msgid ""
#~ "<![CDATA[Cat cat = (Cat) sess.get(Cat.class, id, LockMode.UPGRADE);]]>"
#~ msgstr ""
#~ "<![CDATA[Cat cat = (Cat) sess.get(Cat.class, id, LockMode.UPGRADE);]]>"
-
#~ msgid ""
#~ "<![CDATA[sess.save(cat);\n"
#~ "sess.flush(); //force the SQL INSERT\n"
@@ -1602,7 +836,6 @@
#~ "<![CDATA[sess.save(cat);\n"
#~ "sess.flush(); //force the SQL INSERT\n"
#~ "sess.refresh(cat); //re-read the state (after the trigger executes)]]>"
-
#~ msgid ""
#~ "<![CDATA[List cats = session.createQuery(\n"
#~ " \"from Cat as cat where cat.birthdate < ?\")\n"
@@ -1655,7 +888,6 @@
#~ " \"select mother from Cat as mother left join fetch mother.kittens"
#~ "\");\n"
#~ "Set uniqueMothers = new HashSet(mothersWithKittens.list());]]>"
-
#~ msgid ""
#~ "<![CDATA[// fetch ids\n"
#~ "Iterator iter = sess.createQuery(\"from eg.Qux q order by q.likeliness\")."
@@ -1684,7 +916,6 @@
#~ " break;\n"
#~ " }\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[Iterator kittensAndMothers = sess.createQuery(\n"
#~ " \"select kitten, mother from Cat kitten join kitten.mother "
@@ -1711,7 +942,6 @@
#~ " Cat mother = (Cat) tuple[1];\n"
#~ " ....\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[Iterator results = sess.createQuery(\n"
#~ " \"select cat.color, min(cat.birthdate), count(cat) from Cat cat "
@@ -1742,7 +972,6 @@
#~ " Integer count = (Integer) row[2];\n"
#~ " .....\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[//named parameter (preferred)\n"
#~ "Query q = sess.createQuery(\"from DomesticCat cat where cat.name = :name"
@@ -1755,7 +984,6 @@
#~ "\");\n"
#~ "q.setString(\"name\", \"Fritz\");\n"
#~ "Iterator cats = q.iterate();]]>"
-
#~ msgid ""
#~ "<![CDATA[//positional parameter\n"
#~ "Query q = sess.createQuery(\"from DomesticCat cat where cat.name = ?\");\n"
@@ -1766,7 +994,6 @@
#~ "Query q = sess.createQuery(\"from DomesticCat cat where cat.name = ?\");\n"
#~ "q.setString(0, \"Izi\");\n"
#~ "Iterator cats = q.iterate();]]>"
-
#~ msgid ""
#~ "<![CDATA[//named parameter list\n"
#~ "List names = new ArrayList();\n"
@@ -1785,7 +1012,6 @@
#~ "namesList)\");\n"
#~ "q.setParameterList(\"namesList\", names);\n"
#~ "List cats = q.list();]]>"
-
#~ msgid ""
#~ "<![CDATA[Query q = sess.createQuery(\"from DomesticCat cat\");\n"
#~ "q.setFirstResult(20);\n"
@@ -1796,7 +1022,6 @@
#~ "q.setFirstResult(20);\n"
#~ "q.setMaxResults(10);\n"
#~ "List cats = q.list();]]>"
-
#~ msgid ""
#~ "<![CDATA[Query q = sess.createQuery(\"select cat.name, cat from "
#~ "DomesticCat cat \" +\n"
@@ -1847,7 +1072,6 @@
#~ "\n"
#~ "}\n"
#~ "cats.close()]]>"
-
#~ msgid ""
#~ "<![CDATA[<query name=\"ByNameAndMaximumWeight\"><![CDATA[\n"
#~ " from eg.DomesticCat as cat\n"
@@ -1860,7 +1084,6 @@
#~ " where cat.name = ?\n"
#~ " and cat.weight > ?\n"
#~ "] ]></query>]]>"
-
#~ msgid ""
#~ "<![CDATA[Query q = sess.getNamedQuery(\"ByNameAndMaximumWeight\");\n"
#~ "q.setString(0, name);\n"
@@ -1871,7 +1094,6 @@
#~ "q.setString(0, name);\n"
#~ "q.setInt(1, minWeight);\n"
#~ "List cats = q.list();]]>"
-
#~ msgid ""
#~ "<![CDATA[Collection blackKittens = session.createFilter(\n"
#~ " pk.getKittens(), \n"
@@ -1886,7 +1108,6 @@
#~ " .setParameter( Color.BLACK, Hibernate.custom(ColorUserType.class) )\n"
#~ " .list()\n"
#~ ");]]>"
-
#~ msgid ""
#~ "<![CDATA[Collection blackKittenMates = session.createFilter(\n"
#~ " pk.getKittens(), \n"
@@ -1897,7 +1118,6 @@
#~ " pk.getKittens(), \n"
#~ " \"select this.mate where this.color = eg.Color.BLACK.intValue\")\n"
#~ " .list();]]>"
-
#~ msgid ""
#~ "<![CDATA[Collection tenKittens = session.createFilter(\n"
#~ " mother.getKittens(), \"\")\n"
@@ -1908,7 +1128,6 @@
#~ " mother.getKittens(), \"\")\n"
#~ " .setFirstResult(0).setMaxResults(10)\n"
#~ " .list();]]>"
-
#~ msgid ""
#~ "<![CDATA[Criteria crit = session.createCriteria(Cat.class);\n"
#~ "crit.add( Restrictions.eq( \"color\", eg.Color.BLACK ) );\n"
@@ -1919,7 +1138,6 @@
#~ "crit.add( Restrictions.eq( \"color\", eg.Color.BLACK ) );\n"
#~ "crit.setMaxResults(10);\n"
#~ "List cats = crit.list();]]>"
-
#~ msgid ""
#~ "<![CDATA[List cats = session.createSQLQuery(\"SELECT {cat.*} FROM CAT "
#~ "{cat} WHERE ROWNUM<10\")\n"
@@ -1930,7 +1148,6 @@
#~ "{cat} WHERE ROWNUM<10\")\n"
#~ " .addEntity(\"cat\", Cat.class)\n"
#~ ".list();]]>"
-
#~ msgid ""
#~ "<![CDATA[List cats = session.createSQLQuery(\n"
#~ " \"SELECT {cat}.ID AS {cat.id}, {cat}.SEX AS {cat.sex}, \" +\n"
@@ -1947,7 +1164,6 @@
#~ " \"FROM CAT {cat} WHERE ROWNUM<10\")\n"
#~ " .addEntity(\"cat\", Cat.class)\n"
#~ ".list()]]>"
-
#~ msgid ""
#~ "<![CDATA[DomesticCat cat = (DomesticCat) sess.load( Cat.class, new Long"
#~ "(69) );\n"
@@ -1960,7 +1176,6 @@
#~ "cat.setName(\"PK\");\n"
#~ "sess.flush(); // changes to cat are automatically detected and "
#~ "persisted]]>"
-
#~ msgid ""
#~ "<![CDATA[// in the first session\n"
#~ "Cat cat = (Cat) firstSession.load(Cat.class, catId);\n"
@@ -1985,7 +1200,6 @@
#~ "// later, in a new session\n"
#~ "secondSession.update(cat); // update cat\n"
#~ "secondSession.update(mate); // update mate]]>"
-
#~ msgid ""
#~ "<![CDATA[//just reassociate:\n"
#~ "sess.lock(fritz, LockMode.NONE);\n"
@@ -2000,7 +1214,6 @@
#~ "sess.lock(izi, LockMode.READ);\n"
#~ "//do a version check, using SELECT ... FOR UPDATE, then reassociate:\n"
#~ "sess.lock(pk, LockMode.UPGRADE);]]>"
-
#~ msgid ""
#~ "<![CDATA[// in the first session\n"
#~ "Cat cat = (Cat) firstSession.load(Cat.class, catID);\n"
@@ -2027,10 +1240,8 @@
#~ "non-null id)\n"
#~ "secondSession.saveOrUpdate(mate); // save the new instance (mate has a "
#~ "null id)]]>"
-
#~ msgid "<![CDATA[sess.delete(cat);]]>"
#~ msgstr "<![CDATA[sess.delete(cat);]]>"
-
#~ msgid ""
#~ "<![CDATA[//retrieve a cat from one database\n"
#~ "Session session1 = factory1.openSession();\n"
@@ -2059,7 +1270,6 @@
#~ "session2.replicate(cat, ReplicationMode.LATEST_VERSION);\n"
#~ "tx2.commit();\n"
#~ "session2.close();]]>"
-
#~ msgid ""
#~ "<![CDATA[sess = sf.openSession();\n"
#~ "Transaction tx = sess.beginTransaction();\n"
@@ -2092,15 +1302,12 @@
#~ "...\n"
#~ "tx.commit(); // flush occurs\n"
#~ "sess.close();]]>"
-
#~ msgid "<![CDATA[<one-to-one name=\"person\" cascade=\"persist\"/>]]>"
#~ msgstr "<![CDATA[<one-to-one name=\"person\" cascade=\"persist\"/>]]>"
-
#~ msgid ""
#~ "<![CDATA[<one-to-one name=\"person\" cascade=\"persist,delete,lock\"/>]]>"
#~ msgstr ""
#~ "<![CDATA[<one-to-one name=\"person\" cascade=\"persist,delete,lock\"/>]]>"
-
#~ msgid ""
#~ "<![CDATA[Cat fritz = ......;\n"
#~ "ClassMetadata catMeta = sessionfactory.getClassMetadata(Cat.class);\n"
@@ -2133,3 +1340,4 @@
#~ " namedValues.put( propertyNames[i], propertyValues[i] );\n"
#~ " }\n"
#~ "}]]>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/toolset_guide.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/toolset_guide.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/toolset_guide.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:16\n"
-"PO-Revision-Date: 2009-12-04 15:04+1000\n"
+"PO-Revision-Date: 2010-03-16 10:12+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,80 +21,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Roundtrip engineering with Hibernate is possible using a set of Eclipse "
-"plugins, commandline tools, and Ant tasks."
-msgstr ""
-"可以通过一系列 Eclipse 插件、命令行工具和 Ant 任务来进行与 Hibernate 关联的转"
-"换。 "
+msgid "Roundtrip engineering with Hibernate is possible using a set of Eclipse plugins, commandline tools, and Ant tasks."
+msgstr "可以通过一系列 Eclipse 插件、命令行工具和 Ant 任务来进行与 Hibernate 关联的转换。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Hibernate Tools</emphasis> currently include plugins for the "
-"Eclipse IDE as well as Ant tasks for reverse engineering of existing "
-"databases:"
-msgstr ""
-"除了 Ant 任务外,当前的 <emphasis>Hibernate Tools</emphasis> 也包含了 "
-"Eclipse IDE 的插件,用于与现存数据库的逆向工程。 "
+msgid "<emphasis>Hibernate Tools</emphasis> currently include plugins for the Eclipse IDE as well as Ant tasks for reverse engineering of existing databases:"
+msgstr "除了 Ant 任务外,当前的 <emphasis>Hibernate Tools</emphasis> 也包含了 Eclipse IDE 的插件,用于与现存数据库的逆向工程。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Mapping Editor:</emphasis> an editor for Hibernate XML mapping "
-"files that supports auto-completion and syntax highlighting. It also "
-"supports semantic auto-completion for class names and property/field names, "
-"making it more versatile than a normal XML editor."
-msgstr ""
-"<emphasis>Mapping Editor:</emphasis> Hibernate XML 映射文件的编辑器,支持自动"
-"完成和语法高亮。它也支持对类名和属性/字段名的语义自动完成,比通常的 XML 编辑"
-"器方便得多。 "
+msgid "<emphasis>Mapping Editor:</emphasis> an editor for Hibernate XML mapping files that supports auto-completion and syntax highlighting. It also supports semantic auto-completion for class names and property/field names, making it more versatile than a normal XML editor."
+msgstr "<emphasis>Mapping Editor:</emphasis> Hibernate XML 映射文件的编辑器,支持自动完成和语法高亮。它也支持对类名和属性/字段名的语义自动完成,比通常的 XML 编辑器方便得多。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Console:</emphasis> the console is a new view in Eclipse. In "
-"addition to a tree overview of your console configurations, you are also "
-"provided with an interactive view of your persistent classes and their "
-"relationships. The console allows you to execute HQL queries against your "
-"database and browse the result directly in Eclipse."
-msgstr ""
-"<emphasis>Console:</emphasis> Console 是 Eclipse 的一个新视图。除了对你的 "
-"console 配置的树状概览,你还可以获得对你持久化类及其关联的交互式视图。"
-"Console 允许你对数据库执行 HQL 查询,并直接在 Eclipse 中浏览结果。 "
+msgid "<emphasis>Console:</emphasis> the console is a new view in Eclipse. In addition to a tree overview of your console configurations, you are also provided with an interactive view of your persistent classes and their relationships. The console allows you to execute HQL queries against your database and browse the result directly in Eclipse."
+msgstr "<emphasis>Console:</emphasis> Console 是 Eclipse 的一个新视图。除了对你的 console 配置的树状概览,你还可以获得对你持久化类及其关联的交互式视图。Console 允许你对数据库执行 HQL 查询,并直接在 Eclipse 中浏览结果。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Development Wizards:</emphasis> several wizards are provided with "
-"the Hibernate Eclipse tools. You can use a wizard to quickly generate "
-"Hibernate configuration (cfg.xml) files, or to reverse engineer an existing "
-"database schema into POJO source files and Hibernate mapping files. The "
-"reverse engineering wizard supports customizable templates."
-msgstr ""
-"<emphasis>Development Wizards:</emphasis> 在 Hibernate Eclipse tools 中还提供"
-"了几个向导;你可以用向导快速生成 Hibernate 配置文件(cfg.xml),你甚至还可以"
-"同现存的数据库 schema 中反向工程出 POJO 源代码与 Hibernate 映射文件。反向工程"
-"支持可定制的模版。 "
+msgid "<emphasis>Development Wizards:</emphasis> several wizards are provided with the Hibernate Eclipse tools. You can use a wizard to quickly generate Hibernate configuration (cfg.xml) files, or to reverse engineer an existing database schema into POJO source files and Hibernate mapping files. The reverse engineering wizard supports customizable templates."
+msgstr "<emphasis>Development Wizards:</emphasis> 在 Hibernate Eclipse tools 中还提供了几个向导;你可以用向导快速生成 Hibernate 配置文件(cfg.xml),你甚至还可以同现存的数据库 schema 中反向工程出 POJO 源代码与 Hibernate 映射文件。反向工程支持可定制的模版。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Please refer to the <emphasis>Hibernate Tools</emphasis> package "
-"documentation for more information."
-msgstr ""
-"要得到更多信息,请查阅 <emphasis>Hibernate Tools</emphasis> 包及其文档。 "
+msgid "Please refer to the <emphasis>Hibernate Tools</emphasis> package documentation for more information."
+msgstr "要得到更多信息,请查阅 <emphasis>Hibernate Tools</emphasis> 包及其文档。 "
#. Tag: para
#, no-c-format
-msgid ""
-"However, the Hibernate main package comes bundled with an integrated tool : "
-"<emphasis>SchemaExport</emphasis> aka <literal>hbm2ddl</literal>.It can even "
-"be used from \"inside\" Hibernate."
-msgstr ""
-"同时,Hibernate 主发行包还附带了一个集成的工具(它甚至可以在 Hibernate“内"
-"部”快速运行)<emphasis>SchemaExport</emphasis> ,也就是 <literal>hbm2ddl</"
-"literal>。 "
+msgid "However, the Hibernate main package comes bundled with an integrated tool : <emphasis>SchemaExport</emphasis> aka <literal>hbm2ddl</literal>.It can even be used from \"inside\" Hibernate."
+msgstr "同时,Hibernate 主发行包还附带了一个集成的工具(它甚至可以在 Hibernate“内部”快速运行)<emphasis>SchemaExport</emphasis> ,也就是 <literal>hbm2ddl</literal>。 "
#. Tag: title
#, no-c-format
@@ -103,34 +61,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"DDL can be generated from your mapping files by a Hibernate utility. The "
-"generated schema includes referential integrity constraints, primary and "
-"foreign keys, for entity and collection tables. Tables and sequences are "
-"also created for mapped identifier generators."
-msgstr ""
-"可以从你的映射文件使用一个 Hibernate 工具生成 DDL。生成的 schema 包含有对实体"
-"和集合类表的完整性引用约束(主键和外键)。涉及到的标示符生成器所需的表和 "
-"sequence 也会同时生成。 "
+msgid "DDL can be generated from your mapping files by a Hibernate utility. The generated schema includes referential integrity constraints, primary and foreign keys, for entity and collection tables. Tables and sequences are also created for mapped identifier generators."
+msgstr "可以从你的映射文件使用一个 Hibernate 工具生成 DDL。生成的 schema 包含有对实体和集合类表的完整性引用约束(主键和外键)。涉及到的标示符生成器所需的表和 sequence 也会同时生成。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You <emphasis>must</emphasis> specify a SQL <literal>Dialect</literal> via "
-"the <literal>hibernate.dialect</literal> property when using this tool, as "
-"DDL is highly vendor-specific."
-msgstr ""
-"在使用这个工具的时候,你<emphasis>必须</emphasis> 通过 <literal>hibernate."
-"dialet</literal> 属性指定一个 SQL<literal> 方言(Dialet)</literal>,因为 "
-"DDL 是与供应商高度相关的。 "
+msgid "You <emphasis>must</emphasis> specify a SQL <literal>Dialect</literal> via the <literal>hibernate.dialect</literal> property when using this tool, as DDL is highly vendor-specific."
+msgstr "在使用这个工具的时候,你<emphasis>必须</emphasis> 通过 <literal>hibernate.dialet</literal> 属性指定一个 SQL<literal> 方言(Dialet)</literal>,因为 DDL 是与供应商高度相关的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"First, you must customize your mapping files to improve the generated "
-"schema. The next section covers schema customization."
-msgstr ""
-"首先,要定制你的映射文件,来改善生成的 schema。下章将涵盖 schema 定制。"
+msgid "First, you must customize your mapping files to improve the generated schema. The next section covers schema customization."
+msgstr "首先,要定制你的映射文件,来改善生成的 schema。下章将涵盖 schema 定制。"
#. Tag: title
#, no-c-format
@@ -139,94 +81,47 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Many Hibernate mapping elements define optional attributes named "
-"<literal>length</literal>, <literal>precision</literal> and <literal>scale</"
-"literal>. You can set the length, precision and scale of a column with this "
-"attribute."
-msgstr ""
-"很多 Hibernate 映射元素定义了可选的 <literal>length</literal>、"
-"<literal>precision</literal> 或者 <literal>scale</literal> 属性。你可以通过这"
-"个属性设置字段的长度、精度、小数点位数。 "
+msgid "Many Hibernate mapping elements define optional attributes named <literal>length</literal>, <literal>precision</literal> and <literal>scale</literal>. You can set the length, precision and scale of a column with this attribute."
+msgstr "很多 Hibernate 映射元素定义了可选的 <literal>length</literal>、<literal>precision</literal> 或者 <literal>scale</literal> 属性。你可以通过这个属性设置字段的长度、精度、小数点位数。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Some tags also accept a <literal>not-null</literal> attribute for generating "
-"a <literal>NOT NULL</literal> constraint on table columns, and a "
-"<literal>unique</literal> attribute for generating <literal>UNIQUE</literal> "
-"constraint on table columns."
-msgstr ""
-"有些 tag 还接受 <literal>not-null</literal> 属性(用来在表字段上生成 "
-"<literal>NOT NULL</literal> 约束)和 <literal>unique</literal> 属性(用来在表"
-"字段上生成 <literal>UNIQUE</literal> 约束)。 "
+msgid "Some tags also accept a <literal>not-null</literal> attribute for generating a <literal>NOT NULL</literal> constraint on table columns, and a <literal>unique</literal> attribute for generating <literal>UNIQUE</literal> constraint on table columns."
+msgstr "有些 tag 还接受 <literal>not-null</literal> 属性(用来在表字段上生成 <literal>NOT NULL</literal> 约束)和 <literal>unique</literal> 属性(用来在表字段上生成 <literal>UNIQUE</literal> 约束)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>unique-key</literal> attribute can be used to group columns in a "
-"single, unique key constraint. Currently, the specified value of the "
-"<literal>unique-key</literal> attribute is <emphasis>not</emphasis> used to "
-"name the constraint in the generated DDL. It is only used to group the "
-"columns in the mapping file."
-msgstr ""
-"<literal>unique-key</literal> 属性可以对成组的字段指定一个唯一键约束(unique "
-"key constraint)。目前,<literal>unique-key</literal> 属性指定的值在生成 DDL "
-"时<emphasis>并不会</emphasis>被当作这个约束的名字,它们只是在用来在映射文件内"
-"部用作区分的。 "
+msgid "A <literal>unique-key</literal> attribute can be used to group columns in a single, unique key constraint. Currently, the specified value of the <literal>unique-key</literal> attribute is <emphasis>not</emphasis> used to name the constraint in the generated DDL. It is only used to group the columns in the mapping file."
+msgstr "<literal>unique-key</literal> 属性可以对成组的字段指定一个唯一键约束(unique key constraint)。目前,<literal>unique-key</literal> 属性指定的值在生成 DDL 时<emphasis>并不会</emphasis>被当作这个约束的名字,它们只是在用来在映射文件内部用作区分的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"An <literal>index</literal> attribute specifies the name of an index that "
-"will be created using the mapped column or columns. Multiple columns can be "
-"grouped into the same index by simply specifying the same index name."
-msgstr ""
-"<literal>index</literal> 属性会用对应的字段(一个或多个)生成一个 index,它指"
-"出了这个 index 的名字。如果多个字段对应的 index 名字相同,就会生成包含这些字"
-"段的 index。 "
+msgid "An <literal>index</literal> attribute specifies the name of an index that will be created using the mapped column or columns. Multiple columns can be grouped into the same index by simply specifying the same index name."
+msgstr "<literal>index</literal> 属性会用对应的字段(一个或多个)生成一个 index,它指出了这个 index 的名字。如果多个字段对应的 index 名字相同,就会生成包含这些字段的 index。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>foreign-key</literal> attribute can be used to override the name "
-"of any generated foreign key constraint."
-msgstr ""
-"<literal>foreign-key</literal> 属性可以用来覆盖任何生成的外键约束的名字。 "
+msgid "A <literal>foreign-key</literal> attribute can be used to override the name of any generated foreign key constraint."
+msgstr "<literal>foreign-key</literal> 属性可以用来覆盖任何生成的外键约束的名字。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Many mapping elements also accept a child <literal><column></literal> "
-"element. This is particularly useful for mapping multi-column types:"
-msgstr ""
-"很多映射元素还接受 <literal><column></literal> 子元素。这在定义跨越多字"
-"段的类型时特别有用。"
+msgid "Many mapping elements also accept a child <literal><column></literal> element. This is particularly useful for mapping multi-column types:"
+msgstr "很多映射元素还接受 <literal><column></literal> 子元素。这在定义跨越多字段的类型时特别有用。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>default</literal> attribute allows you to specify a default "
-"value for a column.You should assign the same value to the mapped property "
-"before saving a new instance of the mapped class."
-msgstr ""
-"<literal>default</literal> 属性为字段指定一个默认值(在保存被映射的类的新实例"
-"之前,你应该将同样的值赋于对应的属性)。"
+msgid "The <literal>default</literal> attribute allows you to specify a default value for a column.You should assign the same value to the mapped property before saving a new instance of the mapped class."
+msgstr "<literal>default</literal> 属性为字段指定一个默认值(在保存被映射的类的新实例之前,你应该将同样的值赋于对应的属性)。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>sql-type</literal> attribute allows the user to override the "
-"default mapping of a Hibernate type to SQL datatype."
-msgstr ""
-"<literal>sql-type</literal> 属性允许用户覆盖默认的 Hibernate 类型到 SQL 数据"
-"类型的映射。"
+msgid "The <literal>sql-type</literal> attribute allows the user to override the default mapping of a Hibernate type to SQL datatype."
+msgstr "<literal>sql-type</literal> 属性允许用户覆盖默认的 Hibernate 类型到 SQL 数据类型的映射。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>check</literal> attribute allows you to specify a check "
-"constraint."
+msgid "The <literal>check</literal> attribute allows you to specify a check constraint."
msgstr "<literal>check</literal> 属性允许用户指定一个约束检查。"
#. Tag: para
@@ -356,18 +251,8 @@
#. Tag: entry
#, no-c-format
-msgid ""
-"specifies the name of the foreign key constraint generated for an "
-"association, for a <literal><one-to-one></literal>, <literal><many-"
-"to-one></literal>, <literal><key></literal>, or <literal><many-"
-"to-many></literal> mapping element. Note that <literal>inverse=\"true\"</"
-"literal> sides will not be considered by <literal>SchemaExport</literal>."
-msgstr ""
-"指明一个外键的名字,它是为关联生成的,或者是为 <literal><one-to-one></"
-"literal>, <literal><many-to-one></literal>, <literal><key></"
-"literal>, or <literal><many-to-many></literal> 映射元素。注意 "
-"<literal>inverse=\"true\"</literal> 会被 <literal>SchemaExport</literal> 忽"
-"略。"
+msgid "specifies the name of the foreign key constraint generated for an association, for a <literal><one-to-one></literal>, <literal><many-to-one></literal>, <literal><key></literal>, or <literal><many-to-many></literal> mapping element. Note that <literal>inverse=\"true\"</literal> sides will not be considered by <literal>SchemaExport</literal>."
+msgstr "指明一个外键的名字,它是为关联生成的,或者是为 <literal><one-to-one></literal>, <literal><many-to-one></literal>, <literal><key></literal>, or <literal><many-to-many></literal> 映射元素。注意 <literal>inverse=\"true\"</literal> 会被 <literal>SchemaExport</literal> 忽略。"
#. Tag: entry
#, no-c-format
@@ -375,15 +260,13 @@
msgstr "<literal>sql-type</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>SQL column type</literal>"
-msgstr "<literal>sql-type</literal>"
+msgstr "<literal>SQL column type</literal>"
#. Tag: entry
#, no-c-format
-msgid ""
-"overrides the default column type (attribute of <literal><column></"
-"literal> element only)"
+msgid "overrides the default column type (attribute of <literal><column></literal> element only)"
msgstr "覆盖默认的字段类型(只能用于 <literal><column></literal> 属性)"
#. Tag: entry
@@ -413,20 +296,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal><comment></literal> element allows you to specify "
-"comments for the generated schema."
-msgstr ""
-"<literal><comment></literal> 元素可以让你在生成的 schema 中加入注释。"
+msgid "The <literal><comment></literal> element allows you to specify comments for the generated schema."
+msgstr "<literal><comment></literal> 元素可以让你在生成的 schema 中加入注释。"
#. Tag: para
#, no-c-format
-msgid ""
-"This results in a <literal>comment on table</literal> or <literal>comment on "
-"column</literal> statement in the generated DDL where supported."
-msgstr ""
-"结果是在生成的 DDL 中包含 <literal>comment on table</literal> 或者 "
-"<literal>comment on column</literal> 语句(假若支持的话)。 "
+msgid "This results in a <literal>comment on table</literal> or <literal>comment on column</literal> statement in the generated DDL where supported."
+msgstr "结果是在生成的 DDL 中包含 <literal>comment on table</literal> 或者 <literal>comment on column</literal> 语句(假若支持的话)。 "
#. Tag: title
#, no-c-format
@@ -435,30 +311,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>SchemaExport</literal> tool writes a DDL script to standard out "
-"and/or executes the DDL statements."
-msgstr ""
-"<literal>SchemaExport</literal> 工具把 DDL 脚本写到标准输出,同时/或者执行 "
-"DDL 语句。"
+msgid "The <literal>SchemaExport</literal> tool writes a DDL script to standard out and/or executes the DDL statements."
+msgstr "<literal>SchemaExport</literal> 工具把 DDL 脚本写到标准输出,同时/或者执行 DDL 语句。"
#. Tag: para
#, no-c-format
-msgid ""
-"The following table displays the <literal>SchemaExport</literal> command "
-"line options"
+msgid "The following table displays the <literal>SchemaExport</literal> command line options"
msgstr "下表显示了 <literal>SchemaExport</literal> 命令行选项"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaExport</literal> <emphasis>options "
-"mapping_files</emphasis>"
-msgstr ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaExport</literal> <emphasis>options "
-"mapping_files</emphasis>"
+msgid "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaExport</literal> <emphasis>options mapping_files</emphasis>"
+msgstr "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaExport</literal> <emphasis>options mapping_files</emphasis>"
#. Tag: title
#, no-c-format
@@ -516,9 +380,9 @@
msgstr "不执行在数据库中运行的步骤 "
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>--output=my_schema.ddl</literal>"
-msgstr "<literal>scale</literal>"
+msgstr "<literal>--output=my_schema.ddl</literal>"
#. Tag: entry
#, no-c-format
@@ -526,9 +390,9 @@
msgstr "把输出的 ddl 脚本输出到一个文件"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>--naming=eg.MyNamingStrategy</literal>"
-msgstr "<literal>--naming=eg.MyNamingStrategy</literal>"
+msgstr "<literal>--naming=eg.MyNamingStrategy</literal> "
#. Tag: entry
#, no-c-format
@@ -536,9 +400,9 @@
msgstr "选择 <literal>NamingStrategy</literal>"
#. Tag: entry
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>--config=hibernate.cfg.xml</literal>"
-msgstr "<literal>hibernate.dialect</literal>"
+msgstr "<literal>--config=hibernate.cfg.xml</literal>"
#. Tag: entry
#, no-c-format
@@ -592,11 +456,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"as system properties with <literal>-D</literal><emphasis><property></"
-"emphasis>"
-msgstr ""
-"通过 <literal>-D</literal><emphasis><property></emphasis> 系统参数"
+msgid "as system properties with <literal>-D</literal><emphasis><property></emphasis>"
+msgstr "通过 <literal>-D</literal><emphasis><property></emphasis> 系统参数"
#. Tag: para
#, no-c-format
@@ -606,9 +467,7 @@
#. Tag: para
#, no-c-format
msgid "in a named properties file with <literal>--properties</literal>"
-msgstr ""
-"位于一个其它名字的 properties 文件中,然后用 <literal>--properties</literal> "
-"参数指定"
+msgstr "位于一个其它名字的 properties 文件中,然后用 <literal>--properties</literal> 参数指定"
#. Tag: para
#, no-c-format
@@ -682,8 +541,7 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can call <literal>SchemaExport</literal> from your Ant build script:"
+msgid "You can call <literal>SchemaExport</literal> from your Ant build script:"
msgstr "你可以在你的 Ant build 脚本中调用 <literal>SchemaExport</literal>:"
#. Tag: title
@@ -693,25 +551,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>SchemaUpdate</literal> tool will update an existing schema with "
-"\"incremental\" changes. The <literal>SchemaUpdate</literal> depends upon "
-"the JDBC metadata API and, as such, will not work with all JDBC drivers."
-msgstr ""
-"<literal>SchemaUpdate</literal> 工具对已存在的 schema 采用\"增量\"方式进行更"
-"新。注意 <literal>SchemaUpdate</literal> 严重依赖于 JDBC metadata API,所以它"
-"并非对所有 JDBC 驱动都有效。 "
+msgid "The <literal>SchemaUpdate</literal> tool will update an existing schema with \"incremental\" changes. The <literal>SchemaUpdate</literal> depends upon the JDBC metadata API and, as such, will not work with all JDBC drivers."
+msgstr "<literal>SchemaUpdate</literal> 工具对已存在的 schema 采用\"增量\"方式进行更新。注意 <literal>SchemaUpdate</literal> 严重依赖于 JDBC metadata API,所以它并非对所有 JDBC 驱动都有效。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaUpdate</literal> <emphasis>options "
-"mapping_files</emphasis>"
-msgstr ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaUpdate</literal> <emphasis>options "
-"mapping_files</emphasis>"
+msgid "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaUpdate</literal> <emphasis>options mapping_files</emphasis>"
+msgstr "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaUpdate</literal> <emphasis>options mapping_files</emphasis>"
#. Tag: title
#, no-c-format
@@ -750,33 +596,17 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>SchemaValidator</literal> tool will validate that the existing "
-"database schema \"matches\" your mapping documents. The "
-"<literal>SchemaValidator</literal> depends heavily upon the JDBC metadata "
-"API and, as such, will not work with all JDBC drivers. This tool is "
-"extremely useful for testing."
-msgstr ""
-"<literal>SchemaValidator</literal> 工具会比较数据库现状是否与映射文档“匹配”。"
-"注意,<literal>SchemaValidator</literal> 严重依赖于 JDBC 的 metadata API,因"
-"此不是对所有的 JDBC 驱动都适用。这一工具在测试的时候特别有用。 "
+msgid "The <literal>SchemaValidator</literal> tool will validate that the existing database schema \"matches\" your mapping documents. The <literal>SchemaValidator</literal> depends heavily upon the JDBC metadata API and, as such, will not work with all JDBC drivers. This tool is extremely useful for testing."
+msgstr "<literal>SchemaValidator</literal> 工具会比较数据库现状是否与映射文档“匹配”。注意,<literal>SchemaValidator</literal> 严重依赖于 JDBC 的 metadata API,因此不是对所有的 JDBC 驱动都适用。这一工具在测试的时候特别有用。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaValidator</literal> "
-"<emphasis>options mapping_files</emphasis>"
-msgstr ""
-"<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> "
-"<literal>org.hibernate.tool.hbm2ddl.SchemaValidator</literal> "
-"<emphasis>options mapping_files</emphasis>"
+msgid "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaValidator</literal> <emphasis>options mapping_files</emphasis>"
+msgstr "<literal>java -cp </literal><emphasis>hibernate_classpaths</emphasis> <literal>org.hibernate.tool.hbm2ddl.SchemaValidator</literal> <emphasis>options mapping_files</emphasis>"
#. Tag: para
#, no-c-format
-msgid ""
-"The following table displays the <literal>SchemaValidator</literal> command "
-"line options:"
+msgid "The following table displays the <literal>SchemaValidator</literal> command line options:"
msgstr "下表显示了 <literal>SchemaValidator</literal> 命令行参数:"
#. Tag: title
@@ -917,54 +747,40 @@
#~ " ...\n"
#~ " <property name=\"bar\" type=\"float\"/>\n"
#~ "</class>"
-
#~ msgid "length"
#~ msgstr "length"
-
#~ msgid "precision"
#~ msgstr "precision"
-
#~ msgid "scale"
#~ msgstr "scale"
-
#~ msgid "not-null"
#~ msgstr "not-null"
-
#~ msgid "true|false"
#~ msgstr "true|false"
-
#~ msgid "unique"
#~ msgstr "unique"
-
#~ msgid "index"
#~ msgstr "index"
-
#~ msgid "index_name"
#~ msgstr "index_name"
-
#~ msgid "unique-key"
#~ msgstr "unique-key"
-
#~ msgid "unique_key_name"
#~ msgstr "unique_key_name"
-
#~ msgid "foreign-key"
#~ msgstr "foreign-key"
#, fuzzy
#~ msgid "foreign_key_name"
#~ msgstr "foreign-key"
-
#~ msgid "sql-type"
#~ msgstr "sql-type"
#, fuzzy
#~ msgid "SQL column type"
#~ msgstr "<literal>SQL column type</literal>"
-
#~ msgid "default"
#~ msgstr "default"
-
#~ msgid "check"
#~ msgstr "check"
@@ -993,16 +809,12 @@
#~ " <comment>Balance in USD</comment>\n"
#~ " </column>\n"
#~ "</property>"
-
#~ msgid "--quiet"
#~ msgstr "--quiet"
-
#~ msgid "--drop"
#~ msgstr "--drop"
-
#~ msgid "--create"
#~ msgstr "--create"
-
#~ msgid "--text"
#~ msgstr "--text"
@@ -1013,13 +825,10 @@
#, fuzzy
#~ msgid "--config=hibernate.cfg.xml"
#~ msgstr "<literal>--config=hibernate.cfg.xml</literal>"
-
#~ msgid "--properties=hibernate.properties"
#~ msgstr "--properties=hibernate.properties"
-
#~ msgid "--format"
#~ msgstr "--format"
-
#~ msgid "--delimiter=;"
#~ msgstr "--delimiter=;"
@@ -1034,16 +843,12 @@
#, fuzzy
#~ msgid "hibernate.connection.driver_class"
#~ msgstr "hibernate.connection.url"
-
#~ msgid "hibernate.connection.url"
#~ msgstr "hibernate.connection.url"
-
#~ msgid "hibernate.connection.username"
#~ msgstr "hibernate.connection.username"
-
#~ msgid "hibernate.connection.password"
#~ msgstr "hibernate.connection.password"
-
#~ msgid "hibernate.dialect"
#~ msgstr "hibernate.dialect"
@@ -1158,6 +963,6 @@
#~ " </fileset>\n"
#~ " </schemaupdate>\n"
#~ "</target>"
-
#~ msgid "<!-- <emphasis>Ant Tasks:</emphasis> -->"
#~ msgstr "<!-- <emphasis>Ant Tasks:</emphasis> -->"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/transactions.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/transactions.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/transactions.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:16\n"
-"PO-Revision-Date: 2010-01-11 10:41+1000\n"
+"PO-Revision-Date: 2010-03-16 09:58+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -21,54 +21,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The most important point about Hibernate and concurrency control is that it "
-"is easy to understand. Hibernate directly uses JDBC connections and JTA "
-"resources without adding any additional locking behavior. It is recommended "
-"that you spend some time with the JDBC, ANSI, and transaction isolation "
-"specification of your database management system."
-msgstr ""
-"Hibernate 的事务和并发控制很容易掌握。Hibernate 直接使用 JDBC 连接和 JTA 资"
-"源,不添加任何附加锁定行为。我们强烈推荐你花点时间了解 JDBC 编程,ANSI SQL 查"
-"询语言和你使用的数据库系统的事务隔离规范。 "
+msgid "The most important point about Hibernate and concurrency control is that it is easy to understand. Hibernate directly uses JDBC connections and JTA resources without adding any additional locking behavior. It is recommended that you spend some time with the JDBC, ANSI, and transaction isolation specification of your database management system."
+msgstr "Hibernate 的事务和并发控制很容易掌握。Hibernate 直接使用 JDBC 连接和 JTA 资源,不添加任何附加锁定行为。我们强烈推荐你花点时间了解 JDBC 编程,ANSI SQL 查询语言和你使用的数据库系统的事务隔离规范。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate does not lock objects in memory. Your application can expect the "
-"behavior as defined by the isolation level of your database transactions. "
-"Through <literal>Session</literal>, which is also a transaction-scoped "
-"cache, Hibernate provides repeatable reads for lookup by identifier and "
-"entity queries and not reporting queries that return scalar values."
-msgstr ""
-"Hibernate 不锁定内存中的对象。你的应用程序会按照你的数据库事务的隔离级别规定"
-"的那样运作。幸亏有了 <literal>Session</literal>,使得 Hibernate 通过标识符查"
-"找,和实体查询(不是返回标量值的报表查询)提供了可重复的读取(Repeatable "
-"reads)功能,<literal>Session</literal> 同时也是事务范围内的缓存(cache)。 "
+msgid "Hibernate does not lock objects in memory. Your application can expect the behavior as defined by the isolation level of your database transactions. Through <literal>Session</literal>, which is also a transaction-scoped cache, Hibernate provides repeatable reads for lookup by identifier and entity queries and not reporting queries that return scalar values."
+msgstr "Hibernate 不锁定内存中的对象。你的应用程序会按照你的数据库事务的隔离级别规定的那样运作。幸亏有了 <literal>Session</literal>,使得 Hibernate 通过标识符查找,和实体查询(不是返回标量值的报表查询)提供了可重复的读取(Repeatable reads)功能,<literal>Session</literal> 同时也是事务范围内的缓存(cache)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In addition to versioning for automatic optimistic concurrency control, "
-"Hibernate also offers, using the <literal>SELECT FOR UPDATE</literal> "
-"syntax, a (minor) API for pessimistic locking of rows. Optimistic "
-"concurrency control and this API are discussed later in this chapter."
-msgstr ""
-"除了对自动乐观并发控制提供版本管理,针对行级悲观锁定,Hibernate 也提供了辅助"
-"的(较小的)API,它使用了 <literal>SELECT FOR UPDATE</literal> 的 SQL 语法。"
-"本章后面会讨论乐观并发控制和这个API。 "
+msgid "In addition to versioning for automatic optimistic concurrency control, Hibernate also offers, using the <literal>SELECT FOR UPDATE</literal> syntax, a (minor) API for pessimistic locking of rows. Optimistic concurrency control and this API are discussed later in this chapter."
+msgstr "除了对自动乐观并发控制提供版本管理,针对行级悲观锁定,Hibernate 也提供了辅助的(较小的)API,它使用了 <literal>SELECT FOR UPDATE</literal> 的 SQL 语法。本章后面会讨论乐观并发控制和这个API。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The discussion of concurrency control in Hibernate begins with the "
-"granularity of <literal>Configuration</literal>, <literal>SessionFactory</"
-"literal>, and <literal>Session</literal>, as well as database transactions "
-"and long conversations."
-msgstr ""
-"我们从 <literal>Configuration</literal>层、<literal>SessionFactory</literal> "
-"层,和 <literal>Session</literal> 层开始讨论 Hibernate 的并行控制、数据库事务"
-"和应用程序的长事务。 "
+msgid "The discussion of concurrency control in Hibernate begins with the granularity of <literal>Configuration</literal>, <literal>SessionFactory</literal>, and <literal>Session</literal>, as well as database transactions and long conversations."
+msgstr "我们从 <literal>Configuration</literal>层、<literal>SessionFactory</literal> 层,和 <literal>Session</literal> 层开始讨论 Hibernate 的并行控制、数据库事务和应用程序的长事务。 "
#. Tag: title
#, no-c-format
@@ -77,58 +46,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>SessionFactory</literal> is an expensive-to-create, threadsafe "
-"object, intended to be shared by all application threads. It is created "
-"once, usually on application startup, from a <literal>Configuration</"
-"literal> instance."
-msgstr ""
-"<literal>SessionFactory</literal> 对象的创建代价很昂贵,它是线程安全的对象,"
-"它为所有的应用程序线程所共享。它只创建一次,通常是在应用程序启动的时候,由一"
-"个 <literal>Configuraion</literal> 的实例来创建。 "
+msgid "A <literal>SessionFactory</literal> is an expensive-to-create, threadsafe object, intended to be shared by all application threads. It is created once, usually on application startup, from a <literal>Configuration</literal> instance."
+msgstr "<literal>SessionFactory</literal> 对象的创建代价很昂贵,它是线程安全的对象,它为所有的应用程序线程所共享。它只创建一次,通常是在应用程序启动的时候,由一个 <literal>Configuraion</literal> 的实例来创建。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>Session</literal> is an inexpensive, non-threadsafe object that "
-"should be used once and then discarded for: a single request, a conversation "
-"or a single unit of work. A <literal>Session</literal> will not obtain a "
-"JDBC <literal>Connection</literal>, or a <literal>Datasource</literal>, "
-"unless it is needed. It will not consume any resources until used."
-msgstr ""
-"<literal>Session</literal> 对象的创建代价比较小,是非线程安全的,对于单个请"
-"求,单个会话、单个的 工作单元而言,它只被使用一次,然后就丢弃。只有在需要的时"
-"候,一个 <literal>Session</literal> 对象 才会获取一个 JDBC 的 "
-"<literal>Connection</literal>(或一个<literal>Datasource</literal>)对象,因"
-"此假若不使用的时候它不消费任何资源。 "
+msgid "A <literal>Session</literal> is an inexpensive, non-threadsafe object that should be used once and then discarded for: a single request, a conversation or a single unit of work. A <literal>Session</literal> will not obtain a JDBC <literal>Connection</literal>, or a <literal>Datasource</literal>, unless it is needed. It will not consume any resources until used."
+msgstr "<literal>Session</literal> 对象的创建代价比较小,是非线程安全的,对于单个请求,单个会话、单个的 工作单元而言,它只被使用一次,然后就丢弃。只有在需要的时候,一个 <literal>Session</literal> 对象 才会获取一个 JDBC 的 <literal>Connection</literal>(或一个<literal>Datasource</literal>)对象,因此假若不使用的时候它不消费任何资源。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In order to reduce lock contention in the database, a database transaction "
-"has to be as short as possible. Long database transactions will prevent your "
-"application from scaling to a highly concurrent load. It is not recommended "
-"that you hold a database transaction open during user think time until the "
-"unit of work is complete."
-msgstr ""
-"此外我们还要考虑数据库事务。数据库事务应该尽可能的短,降低数据库中的锁争用。"
-"数据库长事务会阻止你的应用程序扩展到高的并发负载。因此,假若在用户思考期间让"
-"数据库事务开着,直到整个工作单元完成才关闭这个事务,这绝不是一个好的设计。 "
+msgid "In order to reduce lock contention in the database, a database transaction has to be as short as possible. Long database transactions will prevent your application from scaling to a highly concurrent load. It is not recommended that you hold a database transaction open during user think time until the unit of work is complete."
+msgstr "此外我们还要考虑数据库事务。数据库事务应该尽可能的短,降低数据库中的锁争用。数据库长事务会阻止你的应用程序扩展到高的并发负载。因此,假若在用户思考期间让数据库事务开着,直到整个工作单元完成才关闭这个事务,这绝不是一个好的设计。 "
#. Tag: para
#, no-c-format
-msgid ""
-"What is the scope of a unit of work? Can a single Hibernate "
-"<literal>Session</literal> span several database transactions, or is this a "
-"one-to-one relationship of scopes? When should you open and close a "
-"<literal>Session</literal> and how do you demarcate the database transaction "
-"boundaries? These questions are addressed in the following sections."
-msgstr ""
-"一个操作单元(Unit of work)的范围是多大?单个的 Hibernate <literal>Session</"
-"literal> 能跨越多个数据库事务吗?还是一个 <literal>Session</literal> 的作用范"
-"围对应一个数据库事务的范围?应该何时打开 <literal>Session</literal>,何时关"
-"闭 <literal>Session</literal>,你又如何划分数据库事务的边界呢?我们将在后续章"
-"节解决这些问题。"
+msgid "What is the scope of a unit of work? Can a single Hibernate <literal>Session</literal> span several database transactions, or is this a one-to-one relationship of scopes? When should you open and close a <literal>Session</literal> and how do you demarcate the database transaction boundaries? These questions are addressed in the following sections."
+msgstr "一个操作单元(Unit of work)的范围是多大?单个的 Hibernate <literal>Session</literal> 能跨越多个数据库事务吗?还是一个 <literal>Session</literal> 的作用范围对应一个数据库事务的范围?应该何时打开 <literal>Session</literal>,何时关闭 <literal>Session</literal>,你又如何划分数据库事务的边界呢?我们将在后续章节解决这些问题。"
#. Tag: title
#, no-c-format
@@ -136,138 +70,34 @@
msgstr "操作单元(Unit of work)"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"First, let's define a unit of work. A unit of work is a design pattern "
-"described by Martin Fowler as <quote> [maintaining] a list of objects "
-"affected by a business transaction and coordinates the writing out of "
-"changes and the resolution of concurrency problems. </quote><citation>PoEAA</"
-"citation> In other words, its a series of operations we wish to carry out "
-"against the database together. Basically, it is a transaction, though "
-"fulfilling a unit of work will often span multiple physical database "
-"transactions (see <xref linkend=\"transactions-basics-apptx\" />). So really "
-"we are talking about a more abstract notion of a transaction. The term "
-"\"business transaction\" is also sometimes used in lieu of unit of work."
-msgstr ""
-"首先,让我们定义一个工作单元(unit of work)。工作单元是一个设计模式,Martin "
-"Fowler 把它描述为 <quote> [maintaining] a list of objects affected by a "
-"business transaction and coordinates the writing out of changes and the "
-"resolution of concurrency problems. </quote><citation>PoEAA</citation> 换句话"
-"说,它是我们希望对数据库执行的一系列操作。基本上,它是一个事务,虽然完成一个"
-"工作单元经常将跨越多个物理数据库事务(请参考 <xref linkend=\"transactions-"
-"basics-apptx\"/>)。所以,实际上我们在讨论一个更抽象的事务概念。术语 "
-"\"business transaction\" 有时也和工作单元一起使用。 "
+#, no-c-format
+msgid "First, let's define a unit of work. A unit of work is a design pattern described by Martin Fowler as <quote> [maintaining] a list of objects affected by a business transaction and coordinates the writing out of changes and the resolution of concurrency problems. </quote><citation>PoEAA</citation> In other words, its a series of operations we wish to carry out against the database together. Basically, it is a transaction, though fulfilling a unit of work will often span multiple physical database transactions (see <xref linkend=\"transactions-basics-apptx\" />). So really we are talking about a more abstract notion of a transaction. The term \"business transaction\" is also sometimes used in lieu of unit of work."
+msgstr "首先,让我们定义一个工作单元(unit of work)。工作单元是一个设计模式,Martin Fowler 把它描述为 <quote> [maintaining] a list of objects affected by a business transaction and coordinates the writing out of changes and the resolution of concurrency problems. </quote><citation>PoEAA</citation> 换句话说,它是我们希望对数据库执行的一系列操作。基本上,它是一个事务,虽然完成一个工作单元经常将跨越多个物理数据库事务(请参考 <xref linkend=\"transactions-basics-apptx\"/>)。所以,实际上我们在讨论一个更抽象的事务概念。术语 \"business transaction\" 有时也和工作单元一起使用。"
#. Tag: para
#, no-c-format
-msgid ""
-"Do not use the <emphasis>session-per-operation</emphasis> antipattern: do "
-"not open and close a <literal>Session</literal> for every simple database "
-"call in a single thread. The same is true for database transactions. "
-"Database calls in an application are made using a planned sequence; they are "
-"grouped into atomic units of work. This also means that auto-commit after "
-"every single SQL statement is useless in an application as this mode is "
-"intended for ad-hoc SQL console work. Hibernate disables, or expects the "
-"application server to disable, auto-commit mode immediately. Database "
-"transactions are never optional. All communication with a database has to "
-"occur inside a transaction. Auto-commit behavior for reading data should be "
-"avoided, as many small transactions are unlikely to perform better than one "
-"clearly defined unit of work. The latter is also more maintainable and "
-"extensible."
-msgstr ""
-"首先,别用 <emphasis>session-per-operation</emphasis> 这种反模式了,也就是"
-"说,在单个线程中, 不要因为一次简单的数据库调用,就打开和关闭一次 "
-"<literal>Session</literal>!数据库事务也是如此。 应用程序中的数据库调用是按照"
-"计划好的次序,分组为原子的操作单元。(注意,这也意味着,应用程 序中,在单个"
-"的 SQL 语句发送之后,自动事务提交(auto-commit)模式失效了。这种模式专门为SQL"
-"控制台操作设计的。 Hibernate 禁止立即自动事务提交模式,或者期望应用服务器禁止"
-"立即自动事务提交模式。)数据库事务绝不是可有可无的,任何与数据库之间的通讯都"
-"必须在某个事务中进行,不管你是在读还是在写数据。对读数据而言,应该避免 auto-"
-"commit 行为,因为很多小的事务比一个清晰定义的工作单元性能差。后者也更容易维护"
-"和扩展。 "
+msgid "Do not use the <emphasis>session-per-operation</emphasis> antipattern: do not open and close a <literal>Session</literal> for every simple database call in a single thread. The same is true for database transactions. Database calls in an application are made using a planned sequence; they are grouped into atomic units of work. This also means that auto-commit after every single SQL statement is useless in an application as this mode is intended for ad-hoc SQL console work. Hibernate disables, or expects the application server to disable, auto-commit mode immediately. Database transactions are never optional. All communication with a database has to occur inside a transaction. Auto-commit behavior for reading data should be avoided, as many small transactions are unlikely to perform better than one clearly defined unit of work. The latter is also more maintainable and extensible."
+msgstr "首先,别用 <emphasis>session-per-operation</emphasis> 这种反模式了,也就是说,在单个线程中, 不要因为一次简单的数据库调用,就打开和关闭一次 <literal>Session</literal>!数据库事务也是如此。 应用程序中的数据库调用是按照计划好的次序,分组为原子的操作单元。(注意,这也意味着,应用程 序中,在单个的 SQL 语句发送之后,自动事务提交(auto-commit)模式失效了。这种模式专门为SQL控制台操作设计的。 Hibernate 禁止立即自动事务提交模式,或者期望应用服务器禁止立即自动事务提交模式。)数据库事务绝不是可有可无的,任何与数据库之间的通讯都必须在某个事务中进行,不管你是在读还是在写数据。对读数据而言,应该避免 auto-commit 行为,因为很多小的事务比一个清晰定义的工作单元性能差。后者也更容易维护和扩展。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The most common pattern in a multi-user client/server application is "
-"<emphasis>session-per-request</emphasis>. In this model, a request from the "
-"client is sent to the server, where the Hibernate persistence layer runs. A "
-"new Hibernate <literal>Session</literal> is opened, and all database "
-"operations are executed in this unit of work. On completion of the work, and "
-"once the response for the client has been prepared, the session is flushed "
-"and closed. Use a single database transaction to serve the clients request, "
-"starting and committing it when you open and close the <literal>Session</"
-"literal>. The relationship between the two is one-to-one and this model is a "
-"perfect fit for many applications."
-msgstr ""
-"在多用户的 client/server 应用程序中,最常用的模式是 <emphasis>每个请求一个会"
-"话(session-per-request)</emphasis>。 在这种模式下,来自客户端的请求被发送到"
-"服务器端(即 Hibernate 持久化层运行的地方),一个新的 Hibernate "
-"<literal>Session</literal> 被打开,并且执行这个操作单元中所有的数据库操作。一"
-"旦操作完成(同时对客户端的响应也准备就绪),session 被同步,然后关闭。你也可"
-"以使用单 个数据库事务来处理客户端请求,在你打开 <literal>Session</literal> 之"
-"后启动事务,在你关闭 <literal>Session</literal> 之前提交事务。会话和请求之间"
-"的关系是一对一的关系,这种模式对 于大多数应用程序来说是很棒的。 "
+msgid "The most common pattern in a multi-user client/server application is <emphasis>session-per-request</emphasis>. In this model, a request from the client is sent to the server, where the Hibernate persistence layer runs. A new Hibernate <literal>Session</literal> is opened, and all database operations are executed in this unit of work. On completion of the work, and once the response for the client has been prepared, the session is flushed and closed. Use a single database transaction to serve the clients request, starting and committing it when you open and close the <literal>Session</literal>. The relationship between the two is one-to-one and this model is a perfect fit for many applications."
+msgstr "在多用户的 client/server 应用程序中,最常用的模式是 <emphasis>每个请求一个会话(session-per-request)</emphasis>。 在这种模式下,来自客户端的请求被发送到服务器端(即 Hibernate 持久化层运行的地方),一个新的 Hibernate <literal>Session</literal> 被打开,并且执行这个操作单元中所有的数据库操作。一旦操作完成(同时对客户端的响应也准备就绪),session 被同步,然后关闭。你也可以使用单 个数据库事务来处理客户端请求,在你打开 <literal>Session</literal> 之后启动事务,在你关闭 <literal>Session</literal> 之前提交事务。会话和请求之间的关系是一对一的关系,这种模式对 于大多数应用程序来说是很棒的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The challenge lies in the implementation. Hibernate provides built-in "
-"management of the \"current session\" to simplify this pattern. Start a "
-"transaction when a server request has to be processed, and end the "
-"transaction before the response is sent to the client. Common solutions are "
-"<literal>ServletFilter</literal>, AOP interceptor with a pointcut on the "
-"service methods, or a proxy/interception container. An EJB container is a "
-"standardized way to implement cross-cutting aspects such as transaction "
-"demarcation on EJB session beans, declaratively with CMT. If you use "
-"programmatic transaction demarcation, for ease of use and code portability "
-"use the Hibernate <literal>Transaction</literal> API shown later in this "
-"chapter."
-msgstr ""
-"实现才是真正的挑战。Hibernate 内置了对\"当前 session(current session)\" 的"
-"管理,用于简化此模式。你要做的一切就是在服务器端要处理请求的时候,开启事务,"
-"在响应发送给客户之前结束事务。你可以用任何方式来完成这一操作,通常的方案有 "
-"<literal>ServletFilter</literal>,在 service 方法中进行 pointcut 的 AOP 拦截"
-"器,或者 proxy/interception 容器。EJB 容器是实现横切诸如 EJB session bean 上"
-"的事务分界,用 CMT 对事务进行声明等方面的标准手段。假若你决定使用编程式的事务"
-"分界,请参考本章后面讲到的 Hibernate <literal>Transaction</literal> API,这对"
-"易用性和代码可移植性都有好处。 "
+msgid "The challenge lies in the implementation. Hibernate provides built-in management of the \"current session\" to simplify this pattern. Start a transaction when a server request has to be processed, and end the transaction before the response is sent to the client. Common solutions are <literal>ServletFilter</literal>, AOP interceptor with a pointcut on the service methods, or a proxy/interception container. An EJB container is a standardized way to implement cross-cutting aspects such as transaction demarcation on EJB session beans, declaratively with CMT. If you use programmatic transaction demarcation, for ease of use and code portability use the Hibernate <literal>Transaction</literal> API shown later in this chapter."
+msgstr "实现才是真正的挑战。Hibernate 内置了对\"当前 session(current session)\" 的管理,用于简化此模式。你要做的一切就是在服务器端要处理请求的时候,开启事务,在响应发送给客户之前结束事务。你可以用任何方式来完成这一操作,通常的方案有 <literal>ServletFilter</literal>,在 service 方法中进行 pointcut 的 AOP 拦截器,或者 proxy/interception 容器。EJB 容器是实现横切诸如 EJB session bean 上的事务分界,用 CMT 对事务进行声明等方面的标准手段。假若你决定使用编程式的事务分界,请参考本章后面讲到的 Hibernate <literal>Transaction</literal> API,这对易用性和代码可移植性都有好处。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Your application code can access a \"current session\" to process the "
-"request by calling <literal>sessionFactory.getCurrentSession()</literal>. "
-"You will always get a <literal>Session</literal> scoped to the current "
-"database transaction. This has to be configured for either resource-local or "
-"JTA environments, see <xref linkend=\"architecture-current-session\" />."
-msgstr ""
-"在任何时间,任何地方,你的应用代码可以通过简单的调用<literal>sessionFactory."
-"getCurrentSession()</literal> 来访问\"当前 session\",用于处理请求。你总是会"
-"得到当前数据库事务范围内的 <literal>Session</literal>。在使用本地资源或 JTA "
-"环境时,必须配置它,请参见 <xref linkend=\"architecture-current-session\"/"
-">。 "
+#, no-c-format
+msgid "Your application code can access a \"current session\" to process the request by calling <literal>sessionFactory.getCurrentSession()</literal>. You will always get a <literal>Session</literal> scoped to the current database transaction. This has to be configured for either resource-local or JTA environments, see <xref linkend=\"architecture-current-session\" />."
+msgstr "在任何时间,任何地方,你的应用代码可以通过简单的调用<literal>sessionFactory.getCurrentSession()</literal> 来访问\"当前 session\",用于处理请求。你总是会得到当前数据库事务范围内的 <literal>Session</literal>。在使用本地资源或 JTA 环境时,必须配置它,请参见 <xref linkend=\"architecture-current-session\"/>。"
#. Tag: para
#, no-c-format
-msgid ""
-"You can extend the scope of a <literal>Session</literal> and database "
-"transaction until the \"view has been rendered\". This is especially useful "
-"in servlet applications that utilize a separate rendering phase after the "
-"request has been processed. Extending the database transaction until view "
-"rendering, is achieved by implementing your own interceptor. However, this "
-"will be difficult if you rely on EJBs with container-managed transactions. A "
-"transaction will be completed when an EJB method returns, before rendering "
-"of any view can start. See the Hibernate website and forum for tips and "
-"examples relating to this <emphasis>Open Session in View</emphasis> pattern."
-msgstr ""
-"有时,将 <literal>Session</literal> 和数据库事务的边界延伸到\"展示层被渲染后"
-"\"会带来便利。有些 serlvet 应用程序在对请求进行处理后,有个单独的渲染期,这种"
-"延伸对这种程序特别有用。假若你实现你自己的拦截器,把事务边界延伸到展示层渲染"
-"结束后非常容易。然而,假若你依赖有容器管理事务的 EJB,这就不太容易了,因为事"
-"务会在 EJB 方法返回后结束,而那是在任何展示层渲染开始之前。请访问 Hibernate "
-"网站和论坛,你可以找到 <emphasis>Open Session in View</emphasis> 这一模式的提"
-"示和示例。 "
+msgid "You can extend the scope of a <literal>Session</literal> and database transaction until the \"view has been rendered\". This is especially useful in servlet applications that utilize a separate rendering phase after the request has been processed. Extending the database transaction until view rendering, is achieved by implementing your own interceptor. However, this will be difficult if you rely on EJBs with container-managed transactions. A transaction will be completed when an EJB method returns, before rendering of any view can start. See the Hibernate website and forum for tips and examples relating to this <emphasis>Open Session in View</emphasis> pattern."
+msgstr "有时,将 <literal>Session</literal> 和数据库事务的边界延伸到\"展示层被渲染后\"会带来便利。有些 serlvet 应用程序在对请求进行处理后,有个单独的渲染期,这种延伸对这种程序特别有用。假若你实现你自己的拦截器,把事务边界延伸到展示层渲染结束后非常容易。然而,假若你依赖有容器管理事务的 EJB,这就不太容易了,因为事务会在 EJB 方法返回后结束,而那是在任何展示层渲染开始之前。请访问 Hibernate 网站和论坛,你可以找到 <emphasis>Open Session in View</emphasis> 这一模式的提示和示例。 "
#. Tag: title
#, no-c-format
@@ -276,141 +106,53 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The session-per-request pattern is not the only way of designing units of "
-"work. Many business processes require a whole series of interactions with "
-"the user that are interleaved with database accesses. In web and enterprise "
-"applications, it is not acceptable for a database transaction to span a user "
-"interaction. Consider the following example:"
-msgstr ""
-"session-per-request 模式不仅仅是一个可以用来设计操作单元的有用概念。很多业务"
-"处理都需 要一系列完整的与用户之间的交互,而这些用户是指对数据库有交叉访问的用"
-"户。在基于 web 的应用和企业应用中,跨用户交互的数据库事务是无法接受的。考虑下"
-"面的例子: "
+msgid "The session-per-request pattern is not the only way of designing units of work. Many business processes require a whole series of interactions with the user that are interleaved with database accesses. In web and enterprise applications, it is not acceptable for a database transaction to span a user interaction. Consider the following example:"
+msgstr "session-per-request 模式不仅仅是一个可以用来设计操作单元的有用概念。很多业务处理都需 要一系列完整的与用户之间的交互,而这些用户是指对数据库有交叉访问的用户。在基于 web 的应用和企业应用中,跨用户交互的数据库事务是无法接受的。考虑下面的例子: "
#. Tag: para
#, no-c-format
-msgid ""
-"The first screen of a dialog opens. The data seen by the user has been "
-"loaded in a particular <literal>Session</literal> and database transaction. "
-"The user is free to modify the objects."
-msgstr ""
-"在界面的第一屏,打开对话框,用户所看到的数据是被一个特定的 <literal>Session</"
-"literal> 和数据 库事务载入(load)的。用户可以随意修改对话框中的数据对象。 "
+msgid "The first screen of a dialog opens. The data seen by the user has been loaded in a particular <literal>Session</literal> and database transaction. The user is free to modify the objects."
+msgstr "在界面的第一屏,打开对话框,用户所看到的数据是被一个特定的 <literal>Session</literal> 和数据 库事务载入(load)的。用户可以随意修改对话框中的数据对象。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The user clicks \"Save\" after 5 minutes and expects their modifications to "
-"be made persistent. The user also expects that they were the only person "
-"editing this information and that no conflicting modification has occurred."
-msgstr ""
-"5 分钟后,用户点击“保存”,期望所做出的修改被持久化;同时他也期望自己是唯一修"
-"改这个信息的人,不会出现修改冲突。 "
+msgid "The user clicks \"Save\" after 5 minutes and expects their modifications to be made persistent. The user also expects that they were the only person editing this information and that no conflicting modification has occurred."
+msgstr "5 分钟后,用户点击“保存”,期望所做出的修改被持久化;同时他也期望自己是唯一修改这个信息的人,不会出现修改冲突。 "
#. Tag: para
#, no-c-format
-msgid ""
-"From the point of view of the user, we call this unit of work a long-running "
-"<emphasis>conversation</emphasis> or <emphasis>application transaction</"
-"emphasis>. There are many ways to implement this in your application."
-msgstr ""
-"从用户的角度来看,我们把这个操作单元称为长时间运行的<emphasis>对话</emphasis>"
-"(conversation),或者<emphasis>应用事务</emphasis>(application "
-"transaction)。在你的应用程序中,可以有很多种方法来实现它。"
+msgid "From the point of view of the user, we call this unit of work a long-running <emphasis>conversation</emphasis> or <emphasis>application transaction</emphasis>. There are many ways to implement this in your application."
+msgstr "从用户的角度来看,我们把这个操作单元称为长时间运行的<emphasis>对话</emphasis>(conversation),或者<emphasis>应用事务</emphasis>(application transaction)。在你的应用程序中,可以有很多种方法来实现它。"
#. Tag: para
#, no-c-format
-msgid ""
-"A first naive implementation might keep the <literal>Session</literal> and "
-"database transaction open during user think time, with locks held in the "
-"database to prevent concurrent modification and to guarantee isolation and "
-"atomicity. This is an anti-pattern, since lock contention would not allow "
-"the application to scale with the number of concurrent users."
-msgstr ""
-"头一个幼稚的做法是,在用户思考的过程中,保持 <literal>Session</literal> 和数"
-"据库事务是打开的,保持数据库锁定,以阻止并发修改,从而保证数据库事务隔离级别"
-"和原子操作。这种方式当然是一个反模式,因为锁争用会导致应用程序无法扩展并发用"
-"户的数目。 "
+msgid "A first naive implementation might keep the <literal>Session</literal> and database transaction open during user think time, with locks held in the database to prevent concurrent modification and to guarantee isolation and atomicity. This is an anti-pattern, since lock contention would not allow the application to scale with the number of concurrent users."
+msgstr "头一个幼稚的做法是,在用户思考的过程中,保持 <literal>Session</literal> 和数据库事务是打开的,保持数据库锁定,以阻止并发修改,从而保证数据库事务隔离级别和原子操作。这种方式当然是一个反模式,因为锁争用会导致应用程序无法扩展并发用户的数目。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You have to use several database transactions to implement the conversation. "
-"In this case, maintaining isolation of business processes becomes the "
-"partial responsibility of the application tier. A single conversation "
-"usually spans several database transactions. It will be atomic if only one "
-"of these database transactions (the last one) stores the updated data. All "
-"others simply read data (for example, in a wizard-style dialog spanning "
-"several request/response cycles). This is easier to implement than it might "
-"sound, especially if you utilize some of Hibernate's features:"
-msgstr ""
-"很明显,我们必须使用多个数据库事务来实现这个对话。在这个例子中,维护业务处理"
-"的 事务隔离变成了应用程序层的部分责任。一个对话通常跨越多个数据库事务。如果仅"
-"仅只有一个数据库事务(最后的那个事务)保存更新过的数据,而所有其他事务只是单"
-"纯的读取数据(例如在一个跨越多个请求/响应周期的向导风格的对话框中),那么应用"
-"程序事务将保证其原子性。这种方式比听起来还要容易实现,特别是当你使用了 "
-"Hibernate 的下述特性的时候: "
+msgid "You have to use several database transactions to implement the conversation. In this case, maintaining isolation of business processes becomes the partial responsibility of the application tier. A single conversation usually spans several database transactions. It will be atomic if only one of these database transactions (the last one) stores the updated data. All others simply read data (for example, in a wizard-style dialog spanning several request/response cycles). This is easier to implement than it might sound, especially if you utilize some of Hibernate's features:"
+msgstr "很明显,我们必须使用多个数据库事务来实现这个对话。在这个例子中,维护业务处理的 事务隔离变成了应用程序层的部分责任。一个对话通常跨越多个数据库事务。如果仅仅只有一个数据库事务(最后的那个事务)保存更新过的数据,而所有其他事务只是单纯的读取数据(例如在一个跨越多个请求/响应周期的向导风格的对话框中),那么应用程序事务将保证其原子性。这种方式比听起来还要容易实现,特别是当你使用了 Hibernate 的下述特性的时候: "
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Automatic Versioning</emphasis>: Hibernate can perform automatic "
-"optimistic concurrency control for you. It can automatically detect if a "
-"concurrent modification occurred during user think time. Check for this at "
-"the end of the conversation."
-msgstr ""
-"<emphasis>自动版本化</emphasis>:Hibernate 能够自动进行乐观并发控制,如果在用"
-"户思考的过程中发生并发修改,Hibernate 能够自动检测到。一般我们只在对话结束时"
-"才检查。"
+msgid "<emphasis>Automatic Versioning</emphasis>: Hibernate can perform automatic optimistic concurrency control for you. It can automatically detect if a concurrent modification occurred during user think time. Check for this at the end of the conversation."
+msgstr "<emphasis>自动版本化</emphasis>:Hibernate 能够自动进行乐观并发控制,如果在用户思考的过程中发生并发修改,Hibernate 能够自动检测到。一般我们只在对话结束时才检查。"
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Detached Objects</emphasis>: if you decide to use the "
-"<emphasis>session-per-request</emphasis> pattern, all loaded instances will "
-"be in the detached state during user think time. Hibernate allows you to "
-"reattach the objects and persist the modifications. The pattern is called "
-"<emphasis>session-per-request-with-detached-objects</emphasis>. Automatic "
-"versioning is used to isolate concurrent modifications."
-msgstr ""
-"<emphasis>脱管对象</emphasis>(Detached Objects):如果你决定采用前面已经讨论"
-"过的 <emphasis>session-per-request</emphasis> 模式,所有载入的实例在用户思考"
-"的过程中都处于与 Session 脱离的状态。Hibernate 允许你把与 Session 脱离的对象"
-"重新关联到 Session 上,并且对修改进行持久化,这种模式被称为 "
-"<emphasis>session-per-request-with-detached-objects</emphasis>。自动版本化被"
-"用来隔离并发修改。"
+msgid "<emphasis>Detached Objects</emphasis>: if you decide to use the <emphasis>session-per-request</emphasis> pattern, all loaded instances will be in the detached state during user think time. Hibernate allows you to reattach the objects and persist the modifications. The pattern is called <emphasis>session-per-request-with-detached-objects</emphasis>. Automatic versioning is used to isolate concurrent modifications."
+msgstr "<emphasis>脱管对象</emphasis>(Detached Objects):如果你决定采用前面已经讨论过的 <emphasis>session-per-request</emphasis> 模式,所有载入的实例在用户思考的过程中都处于与 Session 脱离的状态。Hibernate 允许你把与 Session 脱离的对象重新关联到 Session 上,并且对修改进行持久化,这种模式被称为 <emphasis>session-per-request-with-detached-objects</emphasis>。自动版本化被用来隔离并发修改。"
#. Tag: para
#, no-c-format
-msgid ""
-"<emphasis>Extended (or Long) Session</emphasis>: the Hibernate "
-"<literal>Session</literal> can be disconnected from the underlying JDBC "
-"connection after the database transaction has been committed and reconnected "
-"when a new client request occurs. This pattern is known as <emphasis>session-"
-"per-conversation</emphasis> and makes even reattachment unnecessary. "
-"Automatic versioning is used to isolate concurrent modifications and the "
-"<literal>Session</literal> will not be allowed to be flushed automatically, "
-"but explicitly."
-msgstr ""
-"<emphasis>Extended (or Long) Session</emphasis>:Hibernate 的 "
-"<literal>Session</literal> 可以在数据库事务提交之后和底层的 JDBC 连接断开,当"
-"一个新的客户端请求到来的时候,它又重新连接上底层的 JDBC 连接。这种模式被称之"
-"为<emphasis>session-per-conversation</emphasis>,这种情况可 能会造成不必要的 "
-"Session 和 JDBC 连接的重新关联。自动版本化被用来隔离并发修改,"
-"<literal>Session</literal> 通常不允许自动 flush,而是显性地 flush。 "
+msgid "<emphasis>Extended (or Long) Session</emphasis>: the Hibernate <literal>Session</literal> can be disconnected from the underlying JDBC connection after the database transaction has been committed and reconnected when a new client request occurs. This pattern is known as <emphasis>session-per-conversation</emphasis> and makes even reattachment unnecessary. Automatic versioning is used to isolate concurrent modifications and the <literal>Session</literal> will not be allowed to be flushed automatically, but explicitly."
+msgstr "<emphasis>Extended (or Long) Session</emphasis>:Hibernate 的 <literal>Session</literal> 可以在数据库事务提交之后和底层的 JDBC 连接断开,当一个新的客户端请求到来的时候,它又重新连接上底层的 JDBC 连接。这种模式被称之为<emphasis>session-per-conversation</emphasis>,这种情况可 能会造成不必要的 Session 和 JDBC 连接的重新关联。自动版本化被用来隔离并发修改,<literal>Session</literal> 通常不允许自动 flush,而是显性地 flush。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Both <emphasis>session-per-request-with-detached-objects</emphasis> and "
-"<emphasis>session-per-conversation</emphasis> have advantages and "
-"disadvantages. These disadvantages are discussed later in this chapter in "
-"the context of optimistic concurrency control."
-msgstr ""
-"<emphasis>session-per-request-with-detached-objects</emphasis> 和 "
-"<emphasis>session-per-conversation</emphasis> 各有优缺点,我们在本章后面乐观"
-"并发控制那部分再进行讨论。 "
+msgid "Both <emphasis>session-per-request-with-detached-objects</emphasis> and <emphasis>session-per-conversation</emphasis> have advantages and disadvantages. These disadvantages are discussed later in this chapter in the context of optimistic concurrency control."
+msgstr "<emphasis>session-per-request-with-detached-objects</emphasis> 和 <emphasis>session-per-conversation</emphasis> 各有优缺点,我们在本章后面乐观并发控制那部分再进行讨论。 "
#. Tag: title
#, no-c-format
@@ -419,15 +161,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"An application can concurrently access the same persistent state in two "
-"different <literal>Session</literal>s. However, an instance of a persistent "
-"class is never shared between two <literal>Session</literal> instances. It "
-"is for this reason that there are two different notions of identity:"
-msgstr ""
-"应用程序可能在两个不同的 <literal>Session</literal> 中并发访问同一持久化状"
-"态,但是,一个持久化类的实例无法在两个 <literal>Session</literal> 中共享。因"
-"此有两种不同的标识语义: "
+msgid "An application can concurrently access the same persistent state in two different <literal>Session</literal>s. However, an instance of a persistent class is never shared between two <literal>Session</literal> instances. It is for this reason that there are two different notions of identity:"
+msgstr "应用程序可能在两个不同的 <literal>Session</literal> 中并发访问同一持久化状态,但是,一个持久化类的实例无法在两个 <literal>Session</literal> 中共享。因此有两种不同的标识语义: "
#. Tag: term
#, no-c-format
@@ -435,9 +170,9 @@
msgstr "数据库标识"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "<literal>foo.getId().equals( bar.getId() )</literal>"
-msgstr "<literal>foo.getId().equals( bar.getId() )</literal>"
+msgstr "<literal>foo.getId().equals( bar.getId() )</literal> "
#. Tag: term
#, no-c-format
@@ -451,79 +186,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"For objects attached to a <emphasis>particular</emphasis> <literal>Session</"
-"literal> (i.e., in the scope of a <literal>Session</literal>), the two "
-"notions are equivalent and JVM identity for database identity is guaranteed "
-"by Hibernate. While the application might concurrently access the \"same"
-"\" (persistent identity) business object in two different sessions, the two "
-"instances will actually be \"different\" (JVM identity). Conflicts are "
-"resolved using an optimistic approach and automatic versioning at flush/"
-"commit time."
-msgstr ""
-"对于那些关联到 <emphasis>特定</emphasis> <literal>Session</literal>(也就是在"
-"单个 <literal>Session</literal> 的范围内)上的对象来说,这两种标识的语义是等"
-"价的,与数据库标识对应的 JVM 标识是由 Hibernate 来保证的。不过,当应用程序在"
-"两个不同的 session 中并发访问具有同一持久化标识的业务对象实例的时候,这个业务"
-"对象的两个实例事实上是不相同的(从 JVM 识别来看)。这种冲突可以通过在同步和提"
-"交的时候使用自动版本化和乐观锁定方法来解决。 "
+msgid "For objects attached to a <emphasis>particular</emphasis> <literal>Session</literal> (i.e., in the scope of a <literal>Session</literal>), the two notions are equivalent and JVM identity for database identity is guaranteed by Hibernate. While the application might concurrently access the \"same\" (persistent identity) business object in two different sessions, the two instances will actually be \"different\" (JVM identity). Conflicts are resolved using an optimistic approach and automatic versioning at flush/commit time."
+msgstr "对于那些关联到 <emphasis>特定</emphasis> <literal>Session</literal>(也就是在单个 <literal>Session</literal> 的范围内)上的对象来说,这两种标识的语义是等价的,与数据库标识对应的 JVM 标识是由 Hibernate 来保证的。不过,当应用程序在两个不同的 session 中并发访问具有同一持久化标识的业务对象实例的时候,这个业务对象的两个实例事实上是不相同的(从 JVM 识别来看)。这种冲突可以通过在同步和提交的时候使用自动版本化和乐观锁定方法来解决。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This approach leaves Hibernate and the database to worry about concurrency. "
-"It also provides the best scalability, since guaranteeing identity in single-"
-"threaded units of work means that it does not need expensive locking or "
-"other means of synchronization. The application does not need to synchronize "
-"on any business object, as long as it maintains a single thread per "
-"<literal>Session</literal>. Within a <literal>Session</literal> the "
-"application can safely use <literal>==</literal> to compare objects."
-msgstr ""
-"这种方式把关于并发的头疼问题留给了 Hibernate 和数据库;由于在单个线程内,操作"
-"单元中的对象识别不 需要代价昂贵的锁定或其他意义上的同步,因此它同时可以提供最"
-"好的可伸缩性。只要在单个线程只持有一个 <literal>Session</literal>,应用程序就"
-"不需要同步任何业务对象。在 <literal>Session</literal> 的范围内,应用程序可以"
-"放心的使用 <literal>==</literal> 进行对象比较。 "
+msgid "This approach leaves Hibernate and the database to worry about concurrency. It also provides the best scalability, since guaranteeing identity in single-threaded units of work means that it does not need expensive locking or other means of synchronization. The application does not need to synchronize on any business object, as long as it maintains a single thread per <literal>Session</literal>. Within a <literal>Session</literal> the application can safely use <literal>==</literal> to compare objects."
+msgstr "这种方式把关于并发的头疼问题留给了 Hibernate 和数据库;由于在单个线程内,操作单元中的对象识别不 需要代价昂贵的锁定或其他意义上的同步,因此它同时可以提供最好的可伸缩性。只要在单个线程只持有一个 <literal>Session</literal>,应用程序就不需要同步任何业务对象。在 <literal>Session</literal> 的范围内,应用程序可以放心的使用 <literal>==</literal> 进行对象比较。 "
#. Tag: para
#, no-c-format
-msgid ""
-"However, an application that uses <literal>==</literal> outside of a "
-"<literal>Session</literal> might produce unexpected results. This might "
-"occur even in some unexpected places. For example, if you put two detached "
-"instances into the same <literal>Set</literal>, both might have the same "
-"database identity (i.e., they represent the same row). JVM identity, "
-"however, is by definition not guaranteed for instances in a detached state. "
-"The developer has to override the <literal>equals()</literal> and "
-"<literal>hashCode()</literal> methods in persistent classes and implement "
-"their own notion of object equality. There is one caveat: never use the "
-"database identifier to implement equality. Use a business key that is a "
-"combination of unique, usually immutable, attributes. The database "
-"identifier will change if a transient object is made persistent. If the "
-"transient instance (usually together with detached instances) is held in a "
-"<literal>Set</literal>, changing the hashcode breaks the contract of the "
-"<literal>Set</literal>. Attributes for business keys do not have to be as "
-"stable as database primary keys; you only have to guarantee stability as "
-"long as the objects are in the same <literal>Set</literal>. See the "
-"Hibernate website for a more thorough discussion of this issue. Please note "
-"that this is not a Hibernate issue, but simply how Java object identity and "
-"equality has to be implemented."
-msgstr ""
-"不过,应用程序在 <literal>Session</literal> 的外面使用 <literal>==</literal> "
-"进行对象比较可能会 导致无法预期的结果。在一些无法预料的场合,例如,如果你把两"
-"个脱管对象实例放进同一个 <literal>Set</literal> 的时候,就可能发生。这两个对"
-"象实例可能有同一个数据库标识(也就是说, 他们代表了表的同一行数据),从 JVM "
-"标识的定义上来说,对脱管的对象而言,Hibernate 无法保证他们 的的 JVM 标识一"
-"致。开发人员必须覆盖持久化类的 <literal>equals()</literal> 方法和 "
-"<literal>hashCode()</literal> 方法,从而实现自定义的对象相等语义。警告:不要"
-"使用数据库标识来实现对象相等,应该使用业务键值,由唯一的,通常不变的属性组"
-"成。当一个瞬时对象被持久化的时候,它的数据库标识会发生改变。如果一个瞬时对象"
-"(通常也包括脱管对象实例)被放入一个 <literal>Set</literal>,改变它的 "
-"hashcode 会导致与这个 <literal>Set</literal> 的关系中断。虽 然业务键值的属性"
-"不象数据库主键那样稳定不变,但是你只需要保证在同一个 <literal>Set</literal> "
-"中的对象属性的稳定性就足够了。请到 Hibernate 网站去寻求这个问题更多的详细的讨"
-"论。请注意,这不是一个有关 Hibernate 的问题,而仅仅是一个关于 Java 对象标识和"
-"判等行为如何实现的问题。 "
+msgid "However, an application that uses <literal>==</literal> outside of a <literal>Session</literal> might produce unexpected results. This might occur even in some unexpected places. For example, if you put two detached instances into the same <literal>Set</literal>, both might have the same database identity (i.e., they represent the same row). JVM identity, however, is by definition not guaranteed for instances in a detached state. The developer has to override the <literal>equals()</literal> and <literal>hashCode()</literal> methods in persistent classes and implement their own notion of object equality. There is one caveat: never use the database identifier to implement equality. Use a business key that is a combination of unique, usually immutable, attributes. The database identifier will change if a transient object is made persistent. If the transient instance (usually together with detached instances) is held in a <literal>Set</literal>, changing the hashcode bre!
aks the contract of the <literal>Set</literal>. Attributes for business keys do not have to be as stable as database primary keys; you only have to guarantee stability as long as the objects are in the same <literal>Set</literal>. See the Hibernate website for a more thorough discussion of this issue. Please note that this is not a Hibernate issue, but simply how Java object identity and equality has to be implemented."
+msgstr "不过,应用程序在 <literal>Session</literal> 的外面使用 <literal>==</literal> 进行对象比较可能会 导致无法预期的结果。在一些无法预料的场合,例如,如果你把两个脱管对象实例放进同一个 <literal>Set</literal> 的时候,就可能发生。这两个对象实例可能有同一个数据库标识(也就是说, 他们代表了表的同一行数据),从 JVM 标识的定义上来说,对脱管的对象而言,Hibernate 无法保证他们 的的 JVM 标识一致。开发人员必须覆盖持久化类的 <literal>equals()</literal> 方法和 <literal>hashCode()</literal> 方法,从而实现自定义的对象相等语义。警告:不要使用数据库标识来实现对象相等,应该使用业务键值,由唯一的,通常不变的属性组成。当一个瞬时对象被持久化的时候,它的数据库标识会发生改变。如果一个瞬时对象(通常也包括脱管对象实例)被放入�!
��个 <literal>Set</literal>,改变它的 hashcode 会导致与这个 <literal>Set</literal> 的关系中断。虽 然业务键值的属性不象数据库主键那样稳定不变,但是你只需要保证在同一个 <literal>Set</literal> 中的对象属性的稳定性就足够了。请到 Hibernate 网站去寻求这个问题更多的详细的讨论。请注意,这不是一个有关 Hibernate 的问题,而仅仅是一个关于 Java 对象标识和判等行为如何实现的问题。 "
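An illustrative business-key implementation of the rule above, assuming a hypothetical User class whose username is unique and effectively immutable:

    public class User {
        private Long id;          // database identifier - deliberately not used for equality
        private String username;  // business key

        @Override
        public boolean equals(Object other) {
            if (this == other) return true;
            if (!(other instanceof User)) return false;
            return username.equals(((User) other).username);
        }

        @Override
        public int hashCode() {
            return username.hashCode();
        }
    }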
#. Tag: title
#, no-c-format
@@ -532,79 +206,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Do not use the anti-patterns <emphasis>session-per-user-session</emphasis> "
-"or <emphasis>session-per-application</emphasis> (there are, however, rare "
-"exceptions to this rule). Some of the following issues might also arise "
-"within the recommended patterns, so ensure that you understand the "
-"implications before making a design decision:"
-msgstr ""
-"决不要使用反模式 <emphasis>session-per-user-session</emphasis> 或者 "
-"<emphasis> session-per-application</emphasis>(当然,这个规定几乎没有例外)。"
-"请注意,下述一些问题可能也会出现在我们推荐的模式中,在你作出某个设计决定之"
-"前,请务必理解该模式的应用前提。 "
+msgid "Do not use the anti-patterns <emphasis>session-per-user-session</emphasis> or <emphasis>session-per-application</emphasis> (there are, however, rare exceptions to this rule). Some of the following issues might also arise within the recommended patterns, so ensure that you understand the implications before making a design decision:"
+msgstr "决不要使用反模式 <emphasis>session-per-user-session</emphasis> 或者 <emphasis> session-per-application</emphasis>(当然,这个规定几乎没有例外)。请注意,下述一些问题可能也会出现在我们推荐的模式中,在你作出某个设计决定之前,请务必理解该模式的应用前提。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A <literal>Session</literal> is not thread-safe. Things that work "
-"concurrently, like HTTP requests, session beans, or Swing workers, will "
-"cause race conditions if a <literal>Session</literal> instance is shared. If "
-"you keep your Hibernate <literal>Session</literal> in your "
-"<literal>HttpSession</literal> (this is discussed later in the chapter), you "
-"should consider synchronizing access to your Http session. Otherwise, a user "
-"that clicks reload fast enough can use the same <literal>Session</literal> "
-"in two concurrently running threads."
-msgstr ""
-"<literal>Session</literal> 对象是非线程安全的。如果一个 <literal>Session</"
-"literal> 实例允许共享的话,那些支持并发运行的东东,例如 HTTP request,"
-"session beans 或者是 Swing workers,将会导致出现资源争用(race condition)。"
-"如果在 <literal>HttpSession</literal> 中有 Hibernate 的 <literal>Session</"
-"literal> 的话(稍后讨论),你应该考虑同步访问你的 Http session。 否则,只要用"
-"户足够快的点击浏览器的“刷新”,就会导致两个并发运行线程使用同一个 "
-"<literal>Session</literal>。 "
+msgid "A <literal>Session</literal> is not thread-safe. Things that work concurrently, like HTTP requests, session beans, or Swing workers, will cause race conditions if a <literal>Session</literal> instance is shared. If you keep your Hibernate <literal>Session</literal> in your <literal>HttpSession</literal> (this is discussed later in the chapter), you should consider synchronizing access to your Http session. Otherwise, a user that clicks reload fast enough can use the same <literal>Session</literal> in two concurrently running threads."
+msgstr "<literal>Session</literal> 对象是非线程安全的。如果一个 <literal>Session</literal> 实例允许共享的话,那些支持并发运行的东东,例如 HTTP request,session beans 或者是 Swing workers,将会导致出现资源争用(race condition)。如果在 <literal>HttpSession</literal> 中有 Hibernate 的 <literal>Session</literal> 的话(稍后讨论),你应该考虑同步访问你的 Http session。 否则,只要用户足够快的点击浏览器的“刷新”,就会导致两个并发运行线程使用同一个 <literal>Session</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"An exception thrown by Hibernate means you have to rollback your database "
-"transaction and close the <literal>Session</literal> immediately (this is "
-"discussed in more detail later in the chapter). If your <literal>Session</"
-"literal> is bound to the application, you have to stop the application. "
-"Rolling back the database transaction does not put your business objects "
-"back into the state they were at the start of the transaction. This means "
-"that the database state and the business objects will be out of sync. "
-"Usually this is not a problem, because exceptions are not recoverable and "
-"you will have to start over after rollback anyway."
-msgstr ""
-"一个由 Hibernate 抛出的异常意味着你必须立即回滚数据库事务,并立即关闭 "
-"<literal>Session</literal>(稍后会展开讨论)。如果你的 <literal>Session</"
-"literal> 绑定到一个应用程序上,你必须停止该应用程序。回滚数据库事务并不会把你"
-"的业务对象退回到事务启动时候的状态。这意味着数据库状态和业务对象状态不同步。"
-"通常情况下,这不是什么问题,因为异常是不可恢复的,你必须在回滚之后重新开始执"
-"行。 "
+msgid "An exception thrown by Hibernate means you have to rollback your database transaction and close the <literal>Session</literal> immediately (this is discussed in more detail later in the chapter). If your <literal>Session</literal> is bound to the application, you have to stop the application. Rolling back the database transaction does not put your business objects back into the state they were at the start of the transaction. This means that the database state and the business objects will be out of sync. Usually this is not a problem, because exceptions are not recoverable and you will have to start over after rollback anyway."
+msgstr "一个由 Hibernate 抛出的异常意味着你必须立即回滚数据库事务,并立即关闭 <literal>Session</literal>(稍后会展开讨论)。如果你的 <literal>Session</literal> 绑定到一个应用程序上,你必须停止该应用程序。回滚数据库事务并不会把你的业务对象退回到事务启动时候的状态。这意味着数据库状态和业务对象状态不同步。通常情况下,这不是什么问题,因为异常是不可恢复的,你必须在回滚之后重新开始执行。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The <literal>Session</literal> caches every object that is in a persistent "
-"state (watched and checked for dirty state by Hibernate). If you keep it "
-"open for a long time or simply load too much data, it will grow endlessly "
-"until you get an OutOfMemoryException. One solution is to call <literal>clear"
-"()</literal> and <literal>evict()</literal> to manage the <literal>Session</"
-"literal> cache, but you should consider a Stored Procedure if you need mass "
-"data operations. Some solutions are shown in <xref linkend=\"batch\" />. "
-"Keeping a <literal>Session</literal> open for the duration of a user session "
-"also means a higher probability of stale data."
-msgstr ""
-"<literal>Session</literal> 缓存了处于持久化状态的每个对象(Hibernate 会监视和"
-"检查脏数据)。 这意味着,如果你让 <literal>Session</literal> 打开很长一段时"
-"间,或是仅仅载入了过多的数据, <literal>Session</literal> 占用的内存会一直增"
-"长,直到抛出 OutOfMemoryException 异常。这个 问题的一个解决方法是调用 "
-"<literal>clear()</literal> 和 <literal>evict()</literal> 来管理 "
-"<literal>Session</literal> 的缓存,但是如果你需要大批量数据操作的话,最好考虑"
-"使用存储过程。在<xref linkend=\"batch\"/>中有一些解决方案。在用户会话期间一直"
-"保持 <literal>Session</literal> 打开也意味着出现脏数据的可能性很高。 "
+#, no-c-format
+msgid "The <literal>Session</literal> caches every object that is in a persistent state (watched and checked for dirty state by Hibernate). If you keep it open for a long time or simply load too much data, it will grow endlessly until you get an OutOfMemoryException. One solution is to call <literal>clear()</literal> and <literal>evict()</literal> to manage the <literal>Session</literal> cache, but you should consider a Stored Procedure if you need mass data operations. Some solutions are shown in <xref linkend=\"batch\" />. Keeping a <literal>Session</literal> open for the duration of a user session also means a higher probability of stale data."
+msgstr "<literal>Session</literal> 缓存了处于持久化状态的每个对象(Hibernate 会监视和检查脏数据)。 这意味着,如果你让 <literal>Session</literal> 打开很长一段时间,或是仅仅载入了过多的数据, <literal>Session</literal> 占用的内存会一直增长,直到抛出 OutOfMemoryException 异常。这个 问题的一个解决方法是调用 <literal>clear()</literal> 和 <literal>evict()</literal> 来管理 <literal>Session</literal> 的缓存,但是如果你需要大批量数据操作的话,最好考虑使用存储过程。在<xref linkend=\"batch\"/>中有一些解决方案。在用户会话期间一直保持 <literal>Session</literal> 打开也意味着出现脏数据的可能性很高。 "
#. Tag: title
#, no-c-format
@@ -613,64 +231,22 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Database, or system, transaction boundaries are always necessary. No "
-"communication with the database can occur outside of a database transaction "
-"(this seems to confuse many developers who are used to the auto-commit "
-"mode). Always use clear transaction boundaries, even for read-only "
-"operations. Depending on your isolation level and database capabilities this "
-"might not be required, but there is no downside if you always demarcate "
-"transactions explicitly. Certainly, a single database transaction is going "
-"to perform better than many small transactions, even for reading data."
-msgstr ""
-"数据库(或者系统)事务的声明总是必须的。在数据库事务之外,就无法和数据库通讯"
-"(这可能会让那些习惯于自动提交事务模式的开发人员感到迷惑)。永远使用清晰的事"
-"务声明,即使只读操作也是如此。进行 显式的事务声明并不总是需要的,这取决于你的"
-"事务隔离级别和数据库的能力,但不管怎么说,声明事务总归有益无害。当然,一个单"
-"独的数据库事务总是比很多琐碎的事务性能更好,即时对读数据而言也是一样。 "
+msgid "Database, or system, transaction boundaries are always necessary. No communication with the database can occur outside of a database transaction (this seems to confuse many developers who are used to the auto-commit mode). Always use clear transaction boundaries, even for read-only operations. Depending on your isolation level and database capabilities this might not be required, but there is no downside if you always demarcate transactions explicitly. Certainly, a single database transaction is going to perform better than many small transactions, even for reading data."
+msgstr "数据库(或者系统)事务的声明总是必须的。在数据库事务之外,就无法和数据库通讯(这可能会让那些习惯于自动提交事务模式的开发人员感到迷惑)。永远使用清晰的事务声明,即使只读操作也是如此。进行 显式的事务声明并不总是需要的,这取决于你的事务隔离级别和数据库的能力,但不管怎么说,声明事务总归有益无害。当然,一个单独的数据库事务总是比很多琐碎的事务性能更好,即时对读数据而言也是一样。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A Hibernate application can run in non-managed (i.e., standalone, simple "
-"Web- or Swing applications) and managed J2EE environments. In a non-managed "
-"environment, Hibernate is usually responsible for its own database "
-"connection pool. The application developer has to manually set transaction "
-"boundaries (begin, commit, or rollback database transactions) themselves. A "
-"managed environment usually provides container-managed transactions (CMT), "
-"with the transaction assembly defined declaratively (in deployment "
-"descriptors of EJB session beans, for example). Programmatic transaction "
-"demarcation is then no longer necessary."
-msgstr ""
-"一个 Hibernate 应用程序可以运行在非托管环境中(也就是独立运行的应用程序,简"
-"单 Web 应用程序,或者Swing图形桌面应用程序),也可以运行在托管的 J2EE 环境"
-"中。在一个非托管环境中,Hibernate 通常自己负责管理数据库连接池。应用程序开发"
-"人员必须手工设置事务声明,换句话说,就是手工启 动,提交,或者回滚数据库事务。"
-"一个托管的环境通常提供了容器管理事务(CMT),例如事务装配通过可声明的方式定义"
-"在 EJB session beans 的部署描述符中。可编程式事务声明不再需要,即使是 "
-"<literal>Session</literal> 的同步也可以自动完成。 "
+msgid "A Hibernate application can run in non-managed (i.e., standalone, simple Web- or Swing applications) and managed J2EE environments. In a non-managed environment, Hibernate is usually responsible for its own database connection pool. The application developer has to manually set transaction boundaries (begin, commit, or rollback database transactions) themselves. A managed environment usually provides container-managed transactions (CMT), with the transaction assembly defined declaratively (in deployment descriptors of EJB session beans, for example). Programmatic transaction demarcation is then no longer necessary."
+msgstr "一个 Hibernate 应用程序可以运行在非托管环境中(也就是独立运行的应用程序,简单 Web 应用程序,或者Swing图形桌面应用程序),也可以运行在托管的 J2EE 环境中。在一个非托管环境中,Hibernate 通常自己负责管理数据库连接池。应用程序开发人员必须手工设置事务声明,换句话说,就是手工启 动,提交,或者回滚数据库事务。一个托管的环境通常提供了容器管理事务(CMT),例如事务装配通过可声明的方式定义在 EJB session beans 的部署描述符中。可编程式事务声明不再需要,即使是 <literal>Session</literal> 的同步也可以自动完成。 "
#. Tag: para
#, no-c-format
-msgid ""
-"However, it is often desirable to keep your persistence layer portable "
-"between non-managed resource-local environments, and systems that can rely "
-"on JTA but use BMT instead of CMT. In both cases use programmatic "
-"transaction demarcation. Hibernate offers a wrapper API called "
-"<literal>Transaction</literal> that translates into the native transaction "
-"system of your deployment environment. This API is actually optional, but we "
-"strongly encourage its use unless you are in a CMT session bean."
-msgstr ""
-"让持久层具备可移植性是人们的理想,这种移植发生在非托管的本地资源环境,与依赖 "
-"JTA 但是使用 BMT 而非 CMT 的系统之间。在两种情况下你都可以使用编程式的事务管"
-"理。Hibernate 提供了一套称为 <literal>Transaction</literal> 的封装 API, 用来"
-"把你的部署环境中的本地事务管理系统转换到 Hibernate 事务上。这个 API 是可选"
-"的,但是我们强烈推荐你使用,除非你用 CMT session bean。 "
+msgid "However, it is often desirable to keep your persistence layer portable between non-managed resource-local environments, and systems that can rely on JTA but use BMT instead of CMT. In both cases use programmatic transaction demarcation. Hibernate offers a wrapper API called <literal>Transaction</literal> that translates into the native transaction system of your deployment environment. This API is actually optional, but we strongly encourage its use unless you are in a CMT session bean."
+msgstr "让持久层具备可移植性是人们的理想,这种移植发生在非托管的本地资源环境,与依赖 JTA 但是使用 BMT 而非 CMT 的系统之间。在两种情况下你都可以使用编程式的事务管理。Hibernate 提供了一套称为 <literal>Transaction</literal> 的封装 API, 用来把你的部署环境中的本地事务管理系统转换到 Hibernate 事务上。这个 API 是可选的,但是我们强烈推荐你使用,除非你用 CMT session bean。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Ending a <literal>Session</literal> usually involves four distinct phases:"
+msgid "Ending a <literal>Session</literal> usually involves four distinct phases:"
msgstr "通常情况下,结束 <literal>Session</literal> 包含了四个不同的阶段: "
#. Tag: para
@@ -695,13 +271,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"We discussed Flushing the session earlier, so we will now have a closer look "
-"at transaction demarcation and exception handling in both managed and non-"
-"managed environments."
-msgstr ""
-"session 的同步(flush,刷出)前面已经讨论过了,我们现在进一步考察在托管和非托"
-"管环境下的事务声明和异常处理。 "
+msgid "We discussed Flushing the session earlier, so we will now have a closer look at transaction demarcation and exception handling in both managed and non-managed environments."
+msgstr "session 的同步(flush,刷出)前面已经讨论过了,我们现在进一步考察在托管和非托管环境下的事务声明和异常处理。 "
#. Tag: title
#, no-c-format
@@ -710,73 +281,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If a Hibernate persistence layer runs in a non-managed environment, database "
-"connections are usually handled by simple (i.e., non-DataSource) connection "
-"pools from which Hibernate obtains connections as needed. The session/"
-"transaction handling idiom looks like this:"
-msgstr ""
-"如果 Hibernat 持久层运行在一个非托管环境中,数据库连接通常由 Hibernate 的简单"
-"(即非 DataSource)连接池机制 来处理。session/transaction 处理方式如下所示: "
+msgid "If a Hibernate persistence layer runs in a non-managed environment, database connections are usually handled by simple (i.e., non-DataSource) connection pools from which Hibernate obtains connections as needed. The session/transaction handling idiom looks like this:"
+msgstr "如果 Hibernat 持久层运行在一个非托管环境中,数据库连接通常由 Hibernate 的简单(即非 DataSource)连接池机制 来处理。session/transaction 处理方式如下所示: "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"You do not have to <literal>flush()</literal> the <literal>Session</literal> "
-"explicitly: the call to <literal>commit()</literal> automatically triggers "
-"the synchronization depending on the <link linkend=\"objectstate-flushing"
-"\">FlushMode</link> for the session. A call to <literal>close()</literal> "
-"marks the end of a session. The main implication of <literal>close()</"
-"literal> is that the JDBC connection will be relinquished by the session. "
-"This Java code is portable and runs in both non-managed and JTA environments."
-msgstr ""
-"你不需要显式 <literal>flush()</literal> <literal>Session</literal> — 对 "
-"<literal>commit()</literal> 的调用会自动触发 session 的同步(取决于 session "
-"的 <xref linkend=\"objectstate-flushing\" />)。调用 <literal>close()</"
-"literal> 标志 session 的结束。<literal>close()</literal> 方法重要的暗示是,"
-"<literal>session</literal> 释放了 JDBC 连接。这段 Java 代码在非托管环境下和 "
-"JTA 环境下都可以运行。"
+#, no-c-format
+msgid "You do not have to <literal>flush()</literal> the <literal>Session</literal> explicitly: the call to <literal>commit()</literal> automatically triggers the synchronization depending on the <link linkend=\"objectstate-flushing\">FlushMode</link> for the session. A call to <literal>close()</literal> marks the end of a session. The main implication of <literal>close()</literal> is that the JDBC connection will be relinquished by the session. This Java code is portable and runs in both non-managed and JTA environments."
+msgstr "你不需要显式 <literal>flush()</literal> <literal>Session</literal> — 对 <literal>commit()</literal> 的调用会自动触发 session 的同步(取决于 session 的 <xref linkend=\"objectstate-flushing\" />)。调用 <literal>close()</literal> 标志 session 的结束。<literal>close()</literal> 方法重要的暗示是,<literal>session</literal> 释放了 JDBC 连接。这段 Java 代码在非托管环境下和 JTA 环境下都可以运行。 "
#. Tag: para
#, no-c-format
-msgid ""
-"As outlined earlier, a much more flexible solution is Hibernate's built-in "
-"\"current session\" context management:"
-msgstr ""
-"更加灵活的方案是 Hibernate 内置的 \"current session\" 上下文管理,前文已经讲"
-"过: "
+msgid "As outlined earlier, a much more flexible solution is Hibernate's built-in \"current session\" context management:"
+msgstr "更加灵活的方案是 Hibernate 内置的 \"current session\" 上下文管理,前文已经讲过: "
#. Tag: para
#, no-c-format
-msgid ""
-"You will not see these code snippets in a regular application; fatal "
-"(system) exceptions should always be caught at the \"top\". In other words, "
-"the code that executes Hibernate calls in the persistence layer, and the "
-"code that handles <literal>RuntimeException</literal> (and usually can only "
-"clean up and exit), are in different layers. The current context management "
-"by Hibernate can significantly simplify this design by accessing a "
-"<literal>SessionFactory</literal>. Exception handling is discussed later in "
-"this chapter."
-msgstr ""
-"你很可能从未在一个通常的应用程序的业务代码中见过这样的代码片断:致命的(系"
-"统)异常应该总是 在应用程序“顶层”被捕获。换句话说,执行 Hibernate 调用的代码"
-"(在持久层)和处理 <literal>RuntimeException</literal> 异常的代码(通常只能清"
-"理和退出应用程序)应该在不同 的应用程序逻辑层。Hibernate 的当前上下文管理可以"
-"极大地简化这一设计,你所有的一切就是 <literal>SessionFactory</literal>。异常"
-"处理将在本章稍后进行讨论。 "
+msgid "You will not see these code snippets in a regular application; fatal (system) exceptions should always be caught at the \"top\". In other words, the code that executes Hibernate calls in the persistence layer, and the code that handles <literal>RuntimeException</literal> (and usually can only clean up and exit), are in different layers. The current context management by Hibernate can significantly simplify this design by accessing a <literal>SessionFactory</literal>. Exception handling is discussed later in this chapter."
+msgstr "你很可能从未在一个通常的应用程序的业务代码中见过这样的代码片断:致命的(系统)异常应该总是 在应用程序“顶层”被捕获。换句话说,执行 Hibernate 调用的代码(在持久层)和处理 <literal>RuntimeException</literal> 异常的代码(通常只能清理和退出应用程序)应该在不同 的应用程序逻辑层。Hibernate 的当前上下文管理可以极大地简化这一设计,你所有的一切就是 <literal>SessionFactory</literal>。异常处理将在本章稍后进行讨论。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You should select <literal>org.hibernate.transaction.JDBCTransactionFactory</"
-"literal>, which is the default, and for the second example select <literal>"
-"\"thread\"</literal> as your <literal>hibernate."
-"current_session_context_class</literal>."
-msgstr ""
-"请注意,你应该选择 <literal>org.hibernate.transaction."
-"JDBCTransactionFactory</literal> (这是默认选项),对第二个例子来说,"
-"<literal>hibernate.current_session_context_class</literal>应该是 <literal>"
-"\"thread\"</literal>。"
+msgid "You should select <literal>org.hibernate.transaction.JDBCTransactionFactory</literal>, which is the default, and for the second example select <literal>\"thread\"</literal> as your <literal>hibernate.current_session_context_class</literal>."
+msgstr "请注意,你应该选择 <literal>org.hibernate.transaction.JDBCTransactionFactory</literal> (这是默认选项),对第二个例子来说,<literal>hibernate.current_session_context_class</literal>应该是 <literal>\"thread\"</literal>。"
#. Tag: title
#, no-c-format
@@ -785,110 +311,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If your persistence layer runs in an application server (for example, behind "
-"EJB session beans), every datasource connection obtained by Hibernate will "
-"automatically be part of the global JTA transaction. You can also install a "
-"standalone JTA implementation and use it without EJB. Hibernate offers two "
-"strategies for JTA integration."
-msgstr ""
-"如果你的持久层运行在一个应用服务器中(例如,在 EJB session beans 的后面),"
-"Hibernate 获取的每个数据源连接将自动成为全局 JTA 事务的一部分。你可以安装一个"
-"独立的 JTA 实现,使用它而不使用 EJB。Hibernate 提供了两种策略进行 JTA 集成。 "
+msgid "If your persistence layer runs in an application server (for example, behind EJB session beans), every datasource connection obtained by Hibernate will automatically be part of the global JTA transaction. You can also install a standalone JTA implementation and use it without EJB. Hibernate offers two strategies for JTA integration."
+msgstr "如果你的持久层运行在一个应用服务器中(例如,在 EJB session beans 的后面),Hibernate 获取的每个数据源连接将自动成为全局 JTA 事务的一部分。你可以安装一个独立的 JTA 实现,使用它而不使用 EJB。Hibernate 提供了两种策略进行 JTA 集成。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you use bean-managed transactions (BMT), Hibernate will tell the "
-"application server to start and end a BMT transaction if you use the "
-"<literal>Transaction</literal> API. The transaction management code is "
-"identical to the non-managed environment."
-msgstr ""
-"如果你使用 bean 管理事务(BMT),可以通过使用 Hibernate 的 "
-"<literal>Transaction</literal> API 来告诉应用服务器启动和结束 BMT 事务。因"
-"此,事务管理代码和在非托管环境下是一样的。 "
+msgid "If you use bean-managed transactions (BMT), Hibernate will tell the application server to start and end a BMT transaction if you use the <literal>Transaction</literal> API. The transaction management code is identical to the non-managed environment."
+msgstr "如果你使用 bean 管理事务(BMT),可以通过使用 Hibernate 的 <literal>Transaction</literal> API 来告诉应用服务器启动和结束 BMT 事务。因此,事务管理代码和在非托管环境下是一样的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"If you want to use a transaction-bound <literal>Session</literal>, that is, "
-"the <literal>getCurrentSession()</literal> functionality for easy context "
-"propagation, use the JTA <literal>UserTransaction</literal> API directly:"
-msgstr ""
-"如果你希望使用与事务绑定的 <literal>Session</literal>,也就是使用 "
-"<literal>getCurrentSession()</literal> 来简化上下文管理,你将不得不直接使用 "
-"JTA <literal>UserTransaction</literal> API。 "
+msgid "If you want to use a transaction-bound <literal>Session</literal>, that is, the <literal>getCurrentSession()</literal> functionality for easy context propagation, use the JTA <literal>UserTransaction</literal> API directly:"
+msgstr "如果你希望使用与事务绑定的 <literal>Session</literal>,也就是使用 <literal>getCurrentSession()</literal> 来简化上下文管理,你将不得不直接使用 JTA <literal>UserTransaction</literal> API。 "
#. Tag: para
#, no-c-format
-msgid ""
-"With CMT, transaction demarcation is completed in session bean deployment "
-"descriptors, not programmatically. The code is reduced to:"
-msgstr ""
-"在 CMT 方式下,事务声明是在 session bean 的部署描述符中,而不需要编程。因此,"
-"代码被简化为:"
+msgid "With CMT, transaction demarcation is completed in session bean deployment descriptors, not programmatically. The code is reduced to:"
+msgstr "在 CMT 方式下,事务声明是在 session bean 的部署描述符中,而不需要编程。因此,代码被简化为:"
#. Tag: para
#, no-c-format
-msgid ""
-"In a CMT/EJB, even rollback happens automatically. An unhandled "
-"<literal>RuntimeException</literal> thrown by a session bean method tells "
-"the container to set the global transaction to rollback. <emphasis>You do "
-"not need to use the Hibernate <literal>Transaction</literal> API at all with "
-"BMT or CMT, and you get automatic propagation of the \"current\" Session "
-"bound to the transaction.</emphasis>"
-msgstr ""
-"在 CMT/EJB 中甚至会自动 rollback,因为假若有未捕获的 "
-"<literal>RuntimeException</literal> 从 session bean 方法中抛出,这就会通知容"
-"器把全局事务回滚。<emphasis>这就意味着,在 BMT 或者 CMT 中,你根本就不需要使"
-"用 Hibernate <literal>Transaction</literal> API,你自动得到了绑定到事务的“当"
-"前” Session。</emphasis>"
+msgid "In a CMT/EJB, even rollback happens automatically. An unhandled <literal>RuntimeException</literal> thrown by a session bean method tells the container to set the global transaction to rollback. <emphasis>You do not need to use the Hibernate <literal>Transaction</literal> API at all with BMT or CMT, and you get automatic propagation of the \"current\" Session bound to the transaction.</emphasis>"
+msgstr "在 CMT/EJB 中甚至会自动 rollback,因为假若有未捕获的 <literal>RuntimeException</literal> 从 session bean 方法中抛出,这就会通知容器把全局事务回滚。<emphasis>这就意味着,在 BMT 或者 CMT 中,你根本就不需要使用 Hibernate <literal>Transaction</literal> API,你自动得到了绑定到事务的“当前” Session。</emphasis>"
#. Tag: para
#, no-c-format
-msgid ""
-"When configuring Hibernate's transaction factory, choose <literal>org."
-"hibernate.transaction.JTATransactionFactory</literal> if you use JTA "
-"directly (BMT), and <literal>org.hibernate.transaction."
-"CMTTransactionFactory</literal> in a CMT session bean. Remember to also set "
-"<literal>hibernate.transaction.manager_lookup_class</literal>. Ensure that "
-"your <literal>hibernate.current_session_context_class</literal> is either "
-"unset (backwards compatibility), or is set to <literal>\"jta\"</literal>."
-msgstr ""
-"注意,当你配置 Hibernate 的 transaction factory 的时候,在直接使用 JTA 的时候"
-"(BMT),你应该选择 <literal>org.hibernate.transaction."
-"JTATransactionFactory</literal>,在 CMT session bean 中选择 <literal>org."
-"hibernate.transaction.CMTTransactionFactory</literal>。记得也要设置 "
-"<literal>hibernate.transaction.manager_lookup_class</literal>。还有,确认你"
-"的 <literal>hibernate.current_session_context_class</literal> 未设置(为了向"
-"下兼容),或者设置为 <literal>\"jta\"</literal>。 "
+msgid "When configuring Hibernate's transaction factory, choose <literal>org.hibernate.transaction.JTATransactionFactory</literal> if you use JTA directly (BMT), and <literal>org.hibernate.transaction.CMTTransactionFactory</literal> in a CMT session bean. Remember to also set <literal>hibernate.transaction.manager_lookup_class</literal>. Ensure that your <literal>hibernate.current_session_context_class</literal> is either unset (backwards compatibility), or is set to <literal>\"jta\"</literal>."
+msgstr "注意,当你配置 Hibernate 的 transaction factory 的时候,在直接使用 JTA 的时候(BMT),你应该选择 <literal>org.hibernate.transaction.JTATransactionFactory</literal>,在 CMT session bean 中选择 <literal>org.hibernate.transaction.CMTTransactionFactory</literal>。记得也要设置 <literal>hibernate.transaction.manager_lookup_class</literal>。还有,确认你的 <literal>hibernate.current_session_context_class</literal> 未设置(为了向下兼容),或者设置为 <literal>\"jta\"</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>getCurrentSession()</literal> operation has one downside in a "
-"JTA environment. There is one caveat to the use of <literal>after_statement</"
-"literal> connection release mode, which is then used by default. Due to a "
-"limitation of the JTA spec, it is not possible for Hibernate to "
-"automatically clean up any unclosed <literal>ScrollableResults</literal> or "
-"<literal>Iterator</literal> instances returned by <literal>scroll()</"
-"literal> or <literal>iterate()</literal>. You <emphasis>must</emphasis> "
-"release the underlying database cursor by calling <literal>ScrollableResults."
-"close()</literal> or <literal>Hibernate.close(Iterator)</literal> explicitly "
-"from a <literal>finally</literal> block. Most applications can easily avoid "
-"using <literal>scroll()</literal> or <literal>iterate()</literal> from the "
-"JTA or CMT code.)"
-msgstr ""
-"<literal>getCurrentSession()</literal>在 JTA 环境中有一个弊端。对 "
-"<literal>after_statement</literal> 连接释放方式有一个警告,这是被默认使用的。"
-"因为 JTA 规范的一个很愚蠢的限制,Hibernate 不可能自动清理任何未关闭的 "
-"<literal>ScrollableResults</literal> 或者<literal>Iterator</literal>,它们是"
-"由 <literal>scroll()</literal> 或 <literal>iterate()</literal> 产生的。你 "
-"<emphasis>must</emphasis> 通过在 <literal>finally</literal> 块中,显式调用 "
-"<literal>ScrollableResults.close()</literal> 或者 <literal>Hibernate.close"
-"(Iterator)</literal> 方法来释放底层数据库游标。(当然,大部分程序完全可以很容"
-"易的避免在 JTA 或 CMT 代码中出现 <literal>scroll()</literal> 或 "
-"<literal>iterate()</literal>。) "
+msgid "The <literal>getCurrentSession()</literal> operation has one downside in a JTA environment. There is one caveat to the use of <literal>after_statement</literal> connection release mode, which is then used by default. Due to a limitation of the JTA spec, it is not possible for Hibernate to automatically clean up any unclosed <literal>ScrollableResults</literal> or <literal>Iterator</literal> instances returned by <literal>scroll()</literal> or <literal>iterate()</literal>. You <emphasis>must</emphasis> release the underlying database cursor by calling <literal>ScrollableResults.close()</literal> or <literal>Hibernate.close(Iterator)</literal> explicitly from a <literal>finally</literal> block. Most applications can easily avoid using <literal>scroll()</literal> or <literal>iterate()</literal> from the JTA or CMT code.)"
+msgstr "<literal>getCurrentSession()</literal>在 JTA 环境中有一个弊端。对 <literal>after_statement</literal> 连接释放方式有一个警告,这是被默认使用的。因为 JTA 规范的一个很愚蠢的限制,Hibernate 不可能自动清理任何未关闭的 <literal>ScrollableResults</literal> 或者<literal>Iterator</literal>,它们是由 <literal>scroll()</literal> 或 <literal>iterate()</literal> 产生的。你 <emphasis>must</emphasis> 通过在 <literal>finally</literal> 块中,显式调用 <literal>ScrollableResults.close()</literal> 或者 <literal>Hibernate.close(Iterator)</literal> 方法来释放底层数据库游标。(当然,大部分程序完全可以很容易的避免在 JTA 或 CMT 代码中出现 <literal>scroll()</literal> 或 <literal>iterate()</literal>。) "
#. Tag: title
#, no-c-format
@@ -897,117 +351,43 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If the <literal>Session</literal> throws an exception, including any "
-"<literal>SQLException</literal>, immediately rollback the database "
-"transaction, call <literal>Session.close()</literal> and discard the "
-"<literal>Session</literal> instance. Certain methods of <literal>Session</"
-"literal> will <emphasis>not</emphasis> leave the session in a consistent "
-"state. No exception thrown by Hibernate can be treated as recoverable. "
-"Ensure that the <literal>Session</literal> will be closed by calling "
-"<literal>close()</literal> in a <literal>finally</literal> block."
-msgstr ""
-"如果 <literal>Session</literal> 抛出异常(包括任何 <literal>SQLException</"
-"literal>),你应该立即回滚数据库事务,调用 <literal>Session.close()</"
-"literal> ,丢弃该 <literal>Session</literal> 实例。<literal>Session</"
-"literal> 的某些方法可能会导致 session 处于不一致的状态。所有由 Hibernate 抛出"
-"的异常都视为不可以恢复的。确保在 <literal>finally</literal> 代码块中调用 "
-"<literal>close()</literal> 方法,以关闭掉 <literal>Session</literal>。"
+msgid "If the <literal>Session</literal> throws an exception, including any <literal>SQLException</literal>, immediately rollback the database transaction, call <literal>Session.close()</literal> and discard the <literal>Session</literal> instance. Certain methods of <literal>Session</literal> will <emphasis>not</emphasis> leave the session in a consistent state. No exception thrown by Hibernate can be treated as recoverable. Ensure that the <literal>Session</literal> will be closed by calling <literal>close()</literal> in a <literal>finally</literal> block."
+msgstr "如果 <literal>Session</literal> 抛出异常(包括任何 <literal>SQLException</literal>),你应该立即回滚数据库事务,调用 <literal>Session.close()</literal> ,丢弃该 <literal>Session</literal> 实例。<literal>Session</literal> 的某些方法可能会导致 session 处于不一致的状态。所有由 Hibernate 抛出的异常都视为不可以恢复的。确保在 <literal>finally</literal> 代码块中调用 <literal>close()</literal> 方法,以关闭掉 <literal>Session</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>HibernateException</literal>, which wraps most of the errors "
-"that can occur in a Hibernate persistence layer, is an unchecked exception. "
-"It was not in older versions of Hibernate. In our opinion, we should not "
-"force the application developer to catch an unrecoverable exception at a low "
-"layer. In most systems, unchecked and fatal exceptions are handled in one of "
-"the first frames of the method call stack (i.e., in higher layers) and "
-"either an error message is presented to the application user or some other "
-"appropriate action is taken. Note that Hibernate might also throw other "
-"unchecked exceptions that are not a <literal>HibernateException</literal>. "
-"These are not recoverable and appropriate action should be taken."
-msgstr ""
-"<literal>HibernateException</literal> 是一个非检查期异常(这不同于 Hibernate "
-"老的版本),它封装了 Hibernate 持久层可能出现的大多数错误。我们的观点是,不应"
-"该强迫应用程序开发人员 在底层捕获无法恢复的异常。在大多数软件系统中,非检查期"
-"异常和致命异常都是在相应方法调用 的堆栈的顶层被处理的(也就是说,在软件上面的"
-"逻辑层),并且提供一个错误信息给应用软件的用户 (或者采取其他某些相应的操"
-"作)。请注意,Hibernate 也有可能抛出其他并不属于 "
-"<literal>HibernateException</literal> 的非检查期异常。这些异常同样也是无法恢"
-"复的,应该 采取某些相应的操作去处理。"
+msgid "The <literal>HibernateException</literal>, which wraps most of the errors that can occur in a Hibernate persistence layer, is an unchecked exception. It was not in older versions of Hibernate. In our opinion, we should not force the application developer to catch an unrecoverable exception at a low layer. In most systems, unchecked and fatal exceptions are handled in one of the first frames of the method call stack (i.e., in higher layers) and either an error message is presented to the application user or some other appropriate action is taken. Note that Hibernate might also throw other unchecked exceptions that are not a <literal>HibernateException</literal>. These are not recoverable and appropriate action should be taken."
+msgstr "<literal>HibernateException</literal> 是一个非检查期异常(这不同于 Hibernate 老的版本),它封装了 Hibernate 持久层可能出现的大多数错误。我们的观点是,不应该强迫应用程序开发人员 在底层捕获无法恢复的异常。在大多数软件系统中,非检查期异常和致命异常都是在相应方法调用 的堆栈的顶层被处理的(也就是说,在软件上面的逻辑层),并且提供一个错误信息给应用软件的用户 (或者采取其他某些相应的操作)。请注意,Hibernate 也有可能抛出其他并不属于 <literal>HibernateException</literal> 的非检查期异常。这些异常同样也是无法恢复的,应该 采取某些相应的操作去处理。"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate wraps <literal>SQLException</literal>s thrown while interacting "
-"with the database in a <literal>JDBCException</literal>. In fact, Hibernate "
-"will attempt to convert the exception into a more meaningful subclass of "
-"<literal>JDBCException</literal>. The underlying <literal>SQLException</"
-"literal> is always available via <literal>JDBCException.getCause()</"
-"literal>. Hibernate converts the <literal>SQLException</literal> into an "
-"appropriate <literal>JDBCException</literal> subclass using the "
-"<literal>SQLExceptionConverter</literal> attached to the "
-"<literal>SessionFactory</literal>. By default, the "
-"<literal>SQLExceptionConverter</literal> is defined by the configured "
-"dialect. However, it is also possible to plug in a custom implementation. "
-"See the javadocs for the <literal>SQLExceptionConverterFactory</literal> "
-"class for details. The standard <literal>JDBCException</literal> subtypes "
-"are:"
-msgstr ""
-"在和数据库进行交互时,Hibernate 把捕获的 <literal>SQLException</literal> 封装"
-"为 Hibernate 的 <literal>JDBCException</literal>。事实上,Hibernate 尝试把异"
-"常转换为更有实际含义的 <literal>JDBCException</literal> 异常的子类。底层的 "
-"<literal>SQLException</literal> 可以通过 <literal>JDBCException.getCause()</"
-"literal> 来得到。Hibernate 通过使用关联到 <literal>SessionFactory</literal> "
-"上的 <literal>SQLExceptionConverter</literal> 来把 <literal>SQLException</"
-"literal> 转换为一个对应的 <literal>JDBCException</literal> 异常的子类。默认情"
-"况下,<literal>SQLExceptionConverter</literal> 可以通过配置 dialect 选项指"
-"定;此外,也可以使用用户自定义的实现类(参考 javadocs "
-"<literal>SQLExceptionConverterFactory</literal> 类来了解详情)。标准的 "
-"<literal>JDBCException</literal> 子类型是: "
+msgid "Hibernate wraps <literal>SQLException</literal>s thrown while interacting with the database in a <literal>JDBCException</literal>. In fact, Hibernate will attempt to convert the exception into a more meaningful subclass of <literal>JDBCException</literal>. The underlying <literal>SQLException</literal> is always available via <literal>JDBCException.getCause()</literal>. Hibernate converts the <literal>SQLException</literal> into an appropriate <literal>JDBCException</literal> subclass using the <literal>SQLExceptionConverter</literal> attached to the <literal>SessionFactory</literal>. By default, the <literal>SQLExceptionConverter</literal> is defined by the configured dialect. However, it is also possible to plug in a custom implementation. See the javadocs for the <literal>SQLExceptionConverterFactory</literal> class for details. The standard <literal>JDBCException</literal> subtypes are:"
+msgstr "在和数据库进行交互时,Hibernate 把捕获的 <literal>SQLException</literal> 封装为 Hibernate 的 <literal>JDBCException</literal>。事实上,Hibernate 尝试把异常转换为更有实际含义的 <literal>JDBCException</literal> 异常的子类。底层的 <literal>SQLException</literal> 可以通过 <literal>JDBCException.getCause()</literal> 来得到。Hibernate 通过使用关联到 <literal>SessionFactory</literal> 上的 <literal>SQLExceptionConverter</literal> 来把 <literal>SQLException</literal> 转换为一个对应的 <literal>JDBCException</literal> 异常的子类。默认情况下,<literal>SQLExceptionConverter</literal> 可以通过配置 dialect 选项指定;此外,也可以使用用户自定义的实现类(参考 javadocs <literal>SQLExceptionConverterFactory</literal> 类来了解详情)。标准的 <literal>JDBCException</literal> 子类型是: "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>JDBCConnectionException</literal>: indicates an error with the "
-"underlying JDBC communication."
-msgstr ""
-"<literal>JDBCConnectionException</literal>:指明底层的 JDBC 通讯出现错误。"
+msgid "<literal>JDBCConnectionException</literal>: indicates an error with the underlying JDBC communication."
+msgstr "<literal>JDBCConnectionException</literal>:指明底层的 JDBC 通讯出现错误。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>SQLGrammarException</literal>: indicates a grammar or syntax "
-"problem with the issued SQL."
-msgstr ""
-"<literal>SQLGrammarException</literal>:指明发送的 SQL 语句的语法或者格式错"
-"误。"
+msgid "<literal>SQLGrammarException</literal>: indicates a grammar or syntax problem with the issued SQL."
+msgstr "<literal>SQLGrammarException</literal>:指明发送的 SQL 语句的语法或者格式错误。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ConstraintViolationException</literal>: indicates some form of "
-"integrity constraint violation."
-msgstr ""
-"<literal>ConstraintViolationException</literal>:指明某种类型的约束违例错误"
+msgid "<literal>ConstraintViolationException</literal>: indicates some form of integrity constraint violation."
+msgstr "<literal>ConstraintViolationException</literal>:指明某种类型的约束违例错误"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockAcquisitionException</literal>: indicates an error acquiring a "
-"lock level necessary to perform the requested operation."
-msgstr ""
-"<literal>LockAcquisitionException</literal>:指明了在执行请求操作时,获取所需"
-"的锁级别时出现的错误。"
+msgid "<literal>LockAcquisitionException</literal>: indicates an error acquiring a lock level necessary to perform the requested operation."
+msgstr "<literal>LockAcquisitionException</literal>:指明了在执行请求操作时,获取所需的锁级别时出现的错误。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>GenericJDBCException</literal>: a generic exception which did not "
-"fall into any of the other categories."
-msgstr ""
-"<literal>GenericJDBCException</literal>:不属于任何其他种类的原生异常。"
+msgid "<literal>GenericJDBCException</literal>: a generic exception which did not fall into any of the other categories."
+msgstr "<literal>GenericJDBCException</literal>:不属于任何其他种类的原生异常。"
#. Tag: title
#, no-c-format
@@ -1016,33 +396,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"An important feature provided by a managed environment like EJB, that is "
-"never provided for non-managed code, is transaction timeout. Transaction "
-"timeouts ensure that no misbehaving transaction can indefinitely tie up "
-"resources while returning no response to the user. Outside a managed (JTA) "
-"environment, Hibernate cannot fully provide this functionality. However, "
-"Hibernate can at least control data access operations, ensuring that "
-"database level deadlocks and queries with huge result sets are limited by a "
-"defined timeout. In a managed environment, Hibernate can delegate "
-"transaction timeout to JTA. This functionality is abstracted by the "
-"Hibernate <literal>Transaction</literal> object."
-msgstr ""
-"EJB 这样的托管环境有一项极为重要的特性,而它从未在非托管环境中提供过,那就是"
-"事务超时。在出现错误的事务行为的时候,超时可以确保不会无限挂起资源、对用户没"
-"有交代。在托管(JTA)环境之外,Hibernate 无法完全提供这一功能。但是,"
-"Hiberante 至少可以控制数据访问,确保数据库级别的死锁,和返回巨大结果集的查询"
-"被限定在一个规定的时间内。在托管环境中,Hibernate 会把事务超时转交给 JTA。这"
-"一功能通过 Hibernate <literal>Transaction</literal> 对象进行抽象。 "
+msgid "An important feature provided by a managed environment like EJB, that is never provided for non-managed code, is transaction timeout. Transaction timeouts ensure that no misbehaving transaction can indefinitely tie up resources while returning no response to the user. Outside a managed (JTA) environment, Hibernate cannot fully provide this functionality. However, Hibernate can at least control data access operations, ensuring that database level deadlocks and queries with huge result sets are limited by a defined timeout. In a managed environment, Hibernate can delegate transaction timeout to JTA. This functionality is abstracted by the Hibernate <literal>Transaction</literal> object."
+msgstr "EJB 这样的托管环境有一项极为重要的特性,而它从未在非托管环境中提供过,那就是事务超时。在出现错误的事务行为的时候,超时可以确保不会无限挂起资源、对用户没有交代。在托管(JTA)环境之外,Hibernate 无法完全提供这一功能。但是,Hiberante 至少可以控制数据访问,确保数据库级别的死锁,和返回巨大结果集的查询被限定在一个规定的时间内。在托管环境中,Hibernate 会把事务超时转交给 JTA。这一功能通过 Hibernate <literal>Transaction</literal> 对象进行抽象。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>setTimeout()</literal> cannot be called in a CMT bean, where "
-"transaction timeouts must be defined declaratively."
-msgstr ""
-"注意 <literal>setTimeout()</literal> 不应该在 CMT bean 中调用,此时事务超时值"
-"应该是被声明式定义的。 "
+msgid "<literal>setTimeout()</literal> cannot be called in a CMT bean, where transaction timeouts must be defined declaratively."
+msgstr "注意 <literal>setTimeout()</literal> 不应该在 CMT bean 中调用,此时事务超时值应该是被声明式定义的。 "
#. Tag: title
#, no-c-format
@@ -1051,20 +411,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The only approach that is consistent with high concurrency and high "
-"scalability, is optimistic concurrency control with versioning. Version "
-"checking uses version numbers, or timestamps, to detect conflicting updates "
-"and to prevent lost updates. Hibernate provides three possible approaches to "
-"writing application code that uses optimistic concurrency. The use cases we "
-"discuss are in the context of long conversations, but version checking also "
-"has the benefit of preventing lost updates in single database transactions."
-msgstr ""
-"唯一能够同时保持高并发和高可伸缩性的方法就是使用带版本化的乐观并发控制。版本"
-"检查使用版本号、 或者时间戳来检测更新冲突(并且防止更新丢失)。Hibernate 为使"
-"用乐观并发控制的代码提供了三种可 能的方法,应用程序在编写这些代码时,可以采用"
-"它们。我们已经在前面应用程序对话那部分展示了 乐观并发控制的应用场景,此外,在"
-"单个数据库事务范围内,版本检查也提供了防止更新丢失的好处。 "
+msgid "The only approach that is consistent with high concurrency and high scalability, is optimistic concurrency control with versioning. Version checking uses version numbers, or timestamps, to detect conflicting updates and to prevent lost updates. Hibernate provides three possible approaches to writing application code that uses optimistic concurrency. The use cases we discuss are in the context of long conversations, but version checking also has the benefit of preventing lost updates in single database transactions."
+msgstr "唯一能够同时保持高并发和高可伸缩性的方法就是使用带版本化的乐观并发控制。版本检查使用版本号、 或者时间戳来检测更新冲突(并且防止更新丢失)。Hibernate 为使用乐观并发控制的代码提供了三种可 能的方法,应用程序在编写这些代码时,可以采用它们。我们已经在前面应用程序对话那部分展示了 乐观并发控制的应用场景,此外,在单个数据库事务范围内,版本检查也提供了防止更新丢失的好处。 "
#. Tag: title
#, no-c-format
@@ -1073,60 +421,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"In an implementation without much help from Hibernate, each interaction with "
-"the database occurs in a new <literal>Session</literal> and the developer is "
-"responsible for reloading all persistent instances from the database before "
-"manipulating them. The application is forced to carry out its own version "
-"checking to ensure conversation transaction isolation. This approach is the "
-"least efficient in terms of database access. It is the approach most similar "
-"to entity EJBs."
-msgstr ""
-"未能充分利用 Hibernate 功能的实现代码中,每次和数据库交互都需要一个新的 "
-"<literal>Session</literal>,而且开发人员必须在显示数据之前从数据库中重新载入"
-"所有的持久化对象实例。这种方式迫使应用程序自己实现版本检查来确保对话事务的隔"
-"离,从数据访问的角度来说是最低效的。这种使用方式和 entity EJB 最相似。 "
+msgid "In an implementation without much help from Hibernate, each interaction with the database occurs in a new <literal>Session</literal> and the developer is responsible for reloading all persistent instances from the database before manipulating them. The application is forced to carry out its own version checking to ensure conversation transaction isolation. This approach is the least efficient in terms of database access. It is the approach most similar to entity EJBs."
+msgstr "未能充分利用 Hibernate 功能的实现代码中,每次和数据库交互都需要一个新的 <literal>Session</literal>,而且开发人员必须在显示数据之前从数据库中重新载入所有的持久化对象实例。这种方式迫使应用程序自己实现版本检查来确保对话事务的隔离,从数据访问的角度来说是最低效的。这种使用方式和 entity EJB 最相似。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>version</literal> property is mapped using <literal><"
-"version></literal>, and Hibernate will automatically increment it during "
-"flush if the entity is dirty."
-msgstr ""
-"<literal>version</literal> 属性使用 <literal><version></literal> 来映"
-"射,如果对象是脏数据,在同步的时候,Hibernate 会自动增加版本号。"
+msgid "The <literal>version</literal> property is mapped using <literal><version></literal>, and Hibernate will automatically increment it during flush if the entity is dirty."
+msgstr "<literal>version</literal> 属性使用 <literal><version></literal> 来映射,如果对象是脏数据,在同步的时候,Hibernate 会自动增加版本号。"
#. Tag: para
#, no-c-format
-msgid ""
-"If you are operating in a low-data-concurrency environment, and do not "
-"require version checking, you can use this approach and skip the version "
-"check. In this case, <emphasis>last commit wins</emphasis> is the default "
-"strategy for long conversations. Be aware that this might confuse the users "
-"of the application, as they might experience lost updates without error "
-"messages or a chance to merge conflicting changes."
-msgstr ""
-"当然,如果你的应用是在一个低数据并发环境下,并不需要版本检查的话,你照样可以"
-"使用这种方式,只不过跳过版本检查就是了。在这种情况下,<emphasis>最晚提交生效"
-"</emphasis> (<emphasis>last commit wins</emphasis>)就是你的长对话的默认处理"
-"策略。请记住这种策略可能会让应用软件的用户感到困惑,因为他们有可能会碰上更新"
-"丢失掉却没有出错信息,或者需要合并更改冲突的情况。 "
+msgid "If you are operating in a low-data-concurrency environment, and do not require version checking, you can use this approach and skip the version check. In this case, <emphasis>last commit wins</emphasis> is the default strategy for long conversations. Be aware that this might confuse the users of the application, as they might experience lost updates without error messages or a chance to merge conflicting changes."
+msgstr "当然,如果你的应用是在一个低数据并发环境下,并不需要版本检查的话,你照样可以使用这种方式,只不过跳过版本检查就是了。在这种情况下,<emphasis>最晚提交生效</emphasis> (<emphasis>last commit wins</emphasis>)就是你的长对话的默认处理策略。请记住这种策略可能会让应用软件的用户感到困惑,因为他们有可能会碰上更新丢失掉却没有出错信息,或者需要合并更改冲突的情况。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Manual version checking is only feasible in trivial circumstances and not "
-"practical for most applications. Often not only single instances, but "
-"complete graphs of modified objects, have to be checked. Hibernate offers "
-"automatic version checking with either an extended <literal>Session</"
-"literal> or detached instances as the design paradigm."
-msgstr ""
-"很明显,手工进行版本检查只适合于某些软件规模非常小的应用场景,对于大多数软件"
-"应用场景来说并不现实。通常情况下,不仅是单个对象实例需要进行版本检查,整个被"
-"修改过的关联对象图也都需要进行版本检查。作为标准设计范例,Hibernate 使用扩展"
-"周期的 <literal>Session</literal> 的方式,或者脱管对象实例的方式来提供自动版"
-"本检查。 "
+msgid "Manual version checking is only feasible in trivial circumstances and not practical for most applications. Often not only single instances, but complete graphs of modified objects, have to be checked. Hibernate offers automatic version checking with either an extended <literal>Session</literal> or detached instances as the design paradigm."
+msgstr "很明显,手工进行版本检查只适合于某些软件规模非常小的应用场景,对于大多数软件应用场景来说并不现实。通常情况下,不仅是单个对象实例需要进行版本检查,整个被修改过的关联对象图也都需要进行版本检查。作为标准设计范例,Hibernate 使用扩展周期的 <literal>Session</literal> 的方式,或者脱管对象实例的方式来提供自动版本检查。 "
#. Tag: title
#, no-c-format
@@ -1135,80 +446,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A single <literal>Session</literal> instance and its persistent instances "
-"that are used for the whole conversation are known as <emphasis>session-per-"
-"conversation</emphasis>. Hibernate checks instance versions at flush time, "
-"throwing an exception if concurrent modification is detected. It is up to "
-"the developer to catch and handle this exception. Common options are the "
-"opportunity for the user to merge changes or to restart the business "
-"conversation with non-stale data."
-msgstr ""
-"单个 <literal>Session</literal> 实例和它所关联的所有持久化对象实例都被用于整"
-"个对话,这被称为 <emphasis>session-per-conversation</emphasis>。Hibernate 在"
-"同步的时候进行对象实例的版本检查,如果检测到并发修改则抛出异常。由开发人员来"
-"决定是否需要捕获和处理这个异常(通常的抉择是给用户 提供一个合并更改,或者在无"
-"脏数据情况下重新进行业务对话的机会)。 "
+msgid "A single <literal>Session</literal> instance and its persistent instances that are used for the whole conversation are known as <emphasis>session-per-conversation</emphasis>. Hibernate checks instance versions at flush time, throwing an exception if concurrent modification is detected. It is up to the developer to catch and handle this exception. Common options are the opportunity for the user to merge changes or to restart the business conversation with non-stale data."
+msgstr "单个 <literal>Session</literal> 实例和它所关联的所有持久化对象实例都被用于整个对话,这被称为 <emphasis>session-per-conversation</emphasis>。Hibernate 在同步的时候进行对象实例的版本检查,如果检测到并发修改则抛出异常。由开发人员来决定是否需要捕获和处理这个异常(通常的抉择是给用户 提供一个合并更改,或者在无脏数据情况下重新进行业务对话的机会)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>Session</literal> is disconnected from any underlying JDBC "
-"connection when waiting for user interaction. This approach is the most "
-"efficient in terms of database access. The application does not version "
-"check or reattach detached instances, nor does it have to reload instances "
-"in every database transaction."
-msgstr ""
-"在等待用户交互的时候, <literal>Session</literal> 断开底层的 JDBC 连接。这种"
-"方式以数据库访问的角度来说是最高效的方式。应用程序不需要关心版本检查或脱管对"
-"象实例的重新关联,在每个数据库事务中,应用程序也不需要载入读取对象实例。 "
+msgid "The <literal>Session</literal> is disconnected from any underlying JDBC connection when waiting for user interaction. This approach is the most efficient in terms of database access. The application does not version check or reattach detached instances, nor does it have to reload instances in every database transaction."
+msgstr "在等待用户交互的时候, <literal>Session</literal> 断开底层的 JDBC 连接。这种方式以数据库访问的角度来说是最高效的方式。应用程序不需要关心版本检查或脱管对象实例的重新关联,在每个数据库事务中,应用程序也不需要载入读取对象实例。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>foo</literal> object knows which <literal>Session</literal> it "
-"was loaded in. Beginning a new database transaction on an old session "
-"obtains a new connection and resumes the session. Committing a database "
-"transaction disconnects a session from the JDBC connection and returns the "
-"connection to the pool. After reconnection, to force a version check on data "
-"you are not updating, you can call <literal>Session.lock()</literal> with "
-"<literal>LockMode.READ</literal> on any objects that might have been updated "
-"by another transaction. You do not need to lock any data that you "
-"<emphasis>are</emphasis> updating. Usually you would set <literal>FlushMode."
-"MANUAL</literal> on an extended <literal>Session</literal>, so that only the "
-"last database transaction cycle is allowed to actually persist all "
-"modifications made in this conversation. Only this last database transaction "
-"will include the <literal>flush()</literal> operation, and then "
-"<literal>close()</literal> the session to end the conversation."
-msgstr ""
-"<literal>foo</literal> 对象知道它是在哪个 <literal>Session</literal> 中被装入"
-"的。在一个旧 session 中开启一个新的数据库事务,会导致 session 获取一个新的连"
-"接,并恢复 session 的功能。将数据库事务提交,使得 session 从 JDBC 连接断开,"
-"并将此连接交还给连接池。在重新连接之后,要强制对你没有更新的数据进行一次版本"
-"检查,你可以对所有可能被其他事务修改过的对象,使用参数 <literal>LockMode."
-"READ</literal> 来调用 <literal>Session.lock()</literal>。你不用 lock 任何你"
-"<emphasis>正在</emphasis>更新的数据。一般你会在扩展的 <literal>Session</"
-"literal> 上设置 <literal>FlushMode.NEVER</literal>,因此只有最后一个数据库事"
-"务循环才会真正的把整个对话中发生的修改发送到数据库。因此,只有这最后一次数据"
-"库事务才会包含 <literal>flush()</literal> 操作,然后在整个对话结束后,还要 "
-"<literal>close()</literal> 这个 session。 "
+msgid "The <literal>foo</literal> object knows which <literal>Session</literal> it was loaded in. Beginning a new database transaction on an old session obtains a new connection and resumes the session. Committing a database transaction disconnects a session from the JDBC connection and returns the connection to the pool. After reconnection, to force a version check on data you are not updating, you can call <literal>Session.lock()</literal> with <literal>LockMode.READ</literal> on any objects that might have been updated by another transaction. You do not need to lock any data that you <emphasis>are</emphasis> updating. Usually you would set <literal>FlushMode.MANUAL</literal> on an extended <literal>Session</literal>, so that only the last database transaction cycle is allowed to actually persist all modifications made in this conversation. Only this last database transaction will include the <literal>flush()</literal> operation, and then <literal>close()</literal> the se!
ssion to end the conversation."
+msgstr "<literal>foo</literal> 对象知道它是在哪个 <literal>Session</literal> 中被装入的。在一个旧 session 中开启一个新的数据库事务,会导致 session 获取一个新的连接,并恢复 session 的功能。将数据库事务提交,使得 session 从 JDBC 连接断开,并将此连接交还给连接池。在重新连接之后,要强制对你没有更新的数据进行一次版本检查,你可以对所有可能被其他事务修改过的对象,使用参数 <literal>LockMode.READ</literal> 来调用 <literal>Session.lock()</literal>。你不用 lock 任何你<emphasis>正在</emphasis>更新的数据。一般你会在扩展的 <literal>Session</literal> 上设置 <literal>FlushMode.NEVER</literal>,因此只有最后一个数据库事务循环才会真正的把整个对话中发生的修改发送到数据库。因此,只有这最后一次数据库事务才会包含 <literal>flush()</literal> 操作,然后在整个对话结束后�!
�还要 <literal>close()</literal> 这个 session。 "
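
A sketch of one conversation step under this pattern, assuming the extended Session named session was opened with FlushMode.MANUAL and that foo was loaded by it earlier; setName() is a placeholder setter:

    // assumes: import org.hibernate.LockMode;
    Transaction t = session.beginTransaction();   // obtains a new JDBC connection
    session.lock(foo, LockMode.READ);             // optional version check on data not updated here
    foo.setName("bar");
    session.flush();                              // only in the last transaction of the conversation
    t.commit();                                   // returns the JDBC connection to the pool
    session.close();                              // only at the very end of the conversation
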
#. Tag: para
#, no-c-format
-msgid ""
-"This pattern is problematic if the <literal>Session</literal> is too big to "
-"be stored during user think time (for example, an <literal>HttpSession</"
-"literal> should be kept as small as possible). As the <literal>Session</"
-"literal> is also the first-level cache and contains all loaded objects, we "
-"can probably use this strategy only for a few request/response cycles. Use a "
-"<literal>Session</literal> only for a single conversation as it will soon "
-"have stale data."
-msgstr ""
-"如果在用户思考的过程中,<literal>Session</literal> 因为太大了而不能保存,那么"
-"这种模式是有问题的。举例来说,一个 <literal>HttpSession</literal> 应该尽可能"
-"的小。由于 <literal>Session</literal> 是一级缓存,并且保持了所有被载入过的对"
-"象,因此我们只应该在那些少量的 request/response 情况下使用这种策略。你应该只"
-"把一个 <literal>Session</literal> 用于单个对话,因为它很快就会出现脏数据。 "
+msgid "This pattern is problematic if the <literal>Session</literal> is too big to be stored during user think time (for example, an <literal>HttpSession</literal> should be kept as small as possible). As the <literal>Session</literal> is also the first-level cache and contains all loaded objects, we can probably use this strategy only for a few request/response cycles. Use a <literal>Session</literal> only for a single conversation as it will soon have stale data."
+msgstr "如果在用户思考的过程中,<literal>Session</literal> 因为太大了而不能保存,那么这种模式是有问题的。举例来说,一个 <literal>HttpSession</literal> 应该尽可能的小。由于 <literal>Session</literal> 是一级缓存,并且保持了所有被载入过的对象,因此我们只应该在那些少量的 request/response 情况下使用这种策略。你应该只把一个 <literal>Session</literal> 用于单个对话,因为它很快就会出现脏数据。 "
#. Tag: title
#, no-c-format
@@ -1217,42 +471,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Earlier versions of Hibernate required explicit disconnection and "
-"reconnection of a <literal>Session</literal>. These methods are deprecated, "
-"as beginning and ending a transaction has the same effect."
-msgstr ""
-"注意,早期的 Hibernate 版本需要明确的对 <literal>Session</literal> 进行 "
-"disconnect 和 reconnect。这些方法现在已经过时了,打开事务和关闭事务会起到同样"
-"的效果。"
+msgid "Earlier versions of Hibernate required explicit disconnection and reconnection of a <literal>Session</literal>. These methods are deprecated, as beginning and ending a transaction has the same effect."
+msgstr "注意,早期的 Hibernate 版本需要明确的对 <literal>Session</literal> 进行 disconnect 和 reconnect。这些方法现在已经过时了,打开事务和关闭事务会起到同样的效果。"
#. Tag: para
#, no-c-format
-msgid ""
-"Keep the disconnected <literal>Session</literal> close to the persistence "
-"layer. Use an EJB stateful session bean to hold the <literal>Session</"
-"literal> in a three-tier environment. Do not transfer it to the web layer, "
-"or even serialize it to a separate tier, to store it in the "
-"<literal>HttpSession</literal>."
-msgstr ""
-"此外,也请注意,你应该让与数据库连接断开的 <literal>Session</literal> 对持久"
-"层保持关闭状态。换句话说,在三层环境中,使用有状态的 EJB session bean 来持 有"
-"<literal>Session</literal>, 而不要把它传递到 web 层(甚至把它序列化到一个单"
-"独的层),保存在 <literal>HttpSession</literal> 中。 "
+msgid "Keep the disconnected <literal>Session</literal> close to the persistence layer. Use an EJB stateful session bean to hold the <literal>Session</literal> in a three-tier environment. Do not transfer it to the web layer, or even serialize it to a separate tier, to store it in the <literal>HttpSession</literal>."
+msgstr "此外,也请注意,你应该让与数据库连接断开的 <literal>Session</literal> 对持久层保持关闭状态。换句话说,在三层环境中,使用有状态的 EJB session bean 来持 有<literal>Session</literal>, 而不要把它传递到 web 层(甚至把它序列化到一个单独的层),保存在 <literal>HttpSession</literal> 中。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The extended session pattern, or <emphasis>session-per-conversation</"
-"emphasis>, is more difficult to implement with automatic current session "
-"context management. You need to supply your own implementation of the "
-"<literal>CurrentSessionContext</literal> for this. See the Hibernate Wiki "
-"for examples."
-msgstr ""
-"扩展 session 模式,或者被称为<emphasis>每次对话一个session(session-per-"
-"conversation)</emphasis>,自动管理当前 session 上下文联用的时候会更困难。你"
-"需要提供你自己的 <literal>CurrentSessionContext</literal> 实现。请参阅 "
-"Hibernate Wiki 以获得示例。 "
+msgid "The extended session pattern, or <emphasis>session-per-conversation</emphasis>, is more difficult to implement with automatic current session context management. You need to supply your own implementation of the <literal>CurrentSessionContext</literal> for this. See the Hibernate Wiki for examples."
+msgstr "扩展 session 模式,或者被称为<emphasis>每次对话一个session(session-per-conversation)</emphasis>,自动管理当前 session 上下文联用的时候会更困难。你需要提供你自己的 <literal>CurrentSessionContext</literal> 实现。请参阅 Hibernate Wiki 以获得示例。 "
#. Tag: title
#, no-c-format
@@ -1261,42 +491,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Each interaction with the persistent store occurs in a new <literal>Session</"
-"literal>. However, the same persistent instances are reused for each "
-"interaction with the database. The application manipulates the state of "
-"detached instances originally loaded in another <literal>Session</literal> "
-"and then reattaches them using <literal>Session.update()</literal>, "
-"<literal>Session.saveOrUpdate()</literal>, or <literal>Session.merge()</"
-"literal>."
-msgstr ""
-"这种方式下,与持久化存储的每次交互都发生在一个新的 <literal>Session</"
-"literal> 中。然而,同一持久化对象实例可以在多次与数据库的交互中重用。应用程序"
-"操纵脱管对象实例 的状态,这个脱管对象实例最初是在另一个 <literal>Session</"
-"literal> 中载入的,然后调用 <literal>Session.update()</literal>,"
-"<literal>Session.saveOrUpdate()</literal>,或者 <literal>Session.merge()</"
-"literal> 来重新关联该对象实例。"
+msgid "Each interaction with the persistent store occurs in a new <literal>Session</literal>. However, the same persistent instances are reused for each interaction with the database. The application manipulates the state of detached instances originally loaded in another <literal>Session</literal> and then reattaches them using <literal>Session.update()</literal>, <literal>Session.saveOrUpdate()</literal>, or <literal>Session.merge()</literal>."
+msgstr "这种方式下,与持久化存储的每次交互都发生在一个新的 <literal>Session</literal> 中。然而,同一持久化对象实例可以在多次与数据库的交互中重用。应用程序操纵脱管对象实例 的状态,这个脱管对象实例最初是在另一个 <literal>Session</literal> 中载入的,然后调用 <literal>Session.update()</literal>,<literal>Session.saveOrUpdate()</literal>,或者 <literal>Session.merge()</literal> 来重新关联该对象实例。"
#. Tag: para
#, no-c-format
-msgid ""
-"Again, Hibernate will check instance versions during flush, throwing an "
-"exception if conflicting updates occurred."
-msgstr ""
-"Hibernate 会再一次在同步的时候检查对象实例的版本,如果发生更新冲突,就抛出异"
-"常。 "
+msgid "Again, Hibernate will check instance versions during flush, throwing an exception if conflicting updates occurred."
+msgstr "Hibernate 会再一次在同步的时候检查对象实例的版本,如果发生更新冲突,就抛出异常。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can also call <literal>lock()</literal> instead of <literal>update()</"
-"literal>, and use <literal>LockMode.READ</literal> (performing a version "
-"check and bypassing all caches) if you are sure that the object has not been "
-"modified."
-msgstr ""
-"如果你确信对象没有被修改过,你也可以调用 <literal>lock()</literal> 来设置 "
-"<literal>LockMode.READ</literal>(绕过所有的缓存,执行版本检查),从而取代 "
-"<literal>update()</literal> 操作。 "
+msgid "You can also call <literal>lock()</literal> instead of <literal>update()</literal>, and use <literal>LockMode.READ</literal> (performing a version check and bypassing all caches) if you are sure that the object has not been modified."
+msgstr "如果你确信对象没有被修改过,你也可以调用 <literal>lock()</literal> 来设置 <literal>LockMode.READ</literal>(绕过所有的缓存,执行版本检查),从而取代 <literal>update()</literal> 操作。 "
#. Tag: title
#, no-c-format
@@ -1305,77 +511,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can disable Hibernate's automatic version increment for particular "
-"properties and collections by setting the <literal>optimistic-lock</literal> "
-"mapping attribute to <literal>false</literal>. Hibernate will then no longer "
-"increment versions if the property is dirty."
-msgstr ""
-"对于特定的属性和集合,通过为它们设置映射属性 <literal>optimistic-lock</"
-"literal> 的值为 <literal>false</literal>,来禁止 Hibernate 的版本自动增加。这"
-"样的话,如果该属性脏数据,Hibernate 将不再增加版本号。 "
+msgid "You can disable Hibernate's automatic version increment for particular properties and collections by setting the <literal>optimistic-lock</literal> mapping attribute to <literal>false</literal>. Hibernate will then no longer increment versions if the property is dirty."
+msgstr "对于特定的属性和集合,通过为它们设置映射属性 <literal>optimistic-lock</literal> 的值为 <literal>false</literal>,来禁止 Hibernate 的版本自动增加。这样的话,如果该属性脏数据,Hibernate 将不再增加版本号。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Legacy database schemas are often static and cannot be modified. Or, other "
-"applications might access the same database and will not know how to handle "
-"version numbers or even timestamps. In both cases, versioning cannot rely on "
-"a particular column in a table. To force a version check with a comparison "
-"of the state of all fields in a row but without a version or timestamp "
-"property mapping, turn on <literal>optimistic-lock=\"all\"</literal> in the "
-"<literal><class></literal> mapping. This conceptually only works if "
-"Hibernate can compare the old and the new state (i.e., if you use a single "
-"long <literal>Session</literal> and not session-per-request-with-detached-"
-"objects)."
-msgstr ""
-"遗留系统的数据库 Schema 通常是静态的,不可修改的。或者,其他应用程序也可能访"
-"问同一数据库,根本无法得知如何处理版本号,甚至时间戳。在以上的所有场景中,实"
-"现版本化不能依靠数据库表的某个特定列。在 <literal><class></literal> 的"
-"映射中设置 <literal>optimistic-lock=\"all\"</literal> 可以在没有版本或者时间"
-"戳属性映射的情况下实现版本检查,此时 Hibernate 将比较一行记录的每个字段的状"
-"态。请注意,只有当 Hibernate 能够比较新旧状态的情况下,这种方式才能生效,也就"
-"是说,你必须使用单个长生命周期 <literal>Session</literal> 模式,而不能使用 "
-"session-per-request-with-detached-objects 模式。 "
+msgid "Legacy database schemas are often static and cannot be modified. Or, other applications might access the same database and will not know how to handle version numbers or even timestamps. In both cases, versioning cannot rely on a particular column in a table. To force a version check with a comparison of the state of all fields in a row but without a version or timestamp property mapping, turn on <literal>optimistic-lock=\"all\"</literal> in the <literal><class></literal> mapping. This conceptually only works if Hibernate can compare the old and the new state (i.e., if you use a single long <literal>Session</literal> and not session-per-request-with-detached-objects)."
+msgstr "遗留系统的数据库 Schema 通常是静态的,不可修改的。或者,其他应用程序也可能访问同一数据库,根本无法得知如何处理版本号,甚至时间戳。在以上的所有场景中,实现版本化不能依靠数据库表的某个特定列。在 <literal><class></literal> 的映射中设置 <literal>optimistic-lock=\"all\"</literal> 可以在没有版本或者时间戳属性映射的情况下实现版本检查,此时 Hibernate 将比较一行记录的每个字段的状态。请注意,只有当 Hibernate 能够比较新旧状态的情况下,这种方式才能生效,也就是说,你必须使用单个长生命周期 <literal>Session</literal> 模式,而不能使用 session-per-request-with-detached-objects 模式。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Concurrent modification can be permitted in instances where the changes that "
-"have been made do not overlap. If you set <literal>optimistic-lock=\"dirty"
-"\"</literal> when mapping the <literal><class></literal>, Hibernate "
-"will only compare dirty fields during flush."
-msgstr ""
-"有些情况下,只要更改不发生交错,并发修改也是允许的。当你在 <literal><"
-"class></literal> 的映射中设置 <literal>optimistic-lock=\"dirty\"</"
-"literal>,Hibernate 在同步的时候将只比较有脏数据的字段。 "
+msgid "Concurrent modification can be permitted in instances where the changes that have been made do not overlap. If you set <literal>optimistic-lock=\"dirty\"</literal> when mapping the <literal><class></literal>, Hibernate will only compare dirty fields during flush."
+msgstr "有些情况下,只要更改不发生交错,并发修改也是允许的。当你在 <literal><class></literal> 的映射中设置 <literal>optimistic-lock=\"dirty\"</literal>,Hibernate 在同步的时候将只比较有脏数据的字段。 "
#. Tag: para
#, no-c-format
-msgid ""
-"In both cases, with dedicated version/timestamp columns or with a full/dirty "
-"field comparison, Hibernate uses a single <literal>UPDATE</literal> "
-"statement, with an appropriate <literal>WHERE</literal> clause, per entity "
-"to execute the version check and update the information. If you use "
-"transitive persistence to cascade reattachment to associated entities, "
-"Hibernate may execute unnecessary updates. This is usually not a problem, "
-"but <emphasis>on update</emphasis> triggers in the database might be "
-"executed even when no changes have been made to detached instances. You can "
-"customize this behavior by setting <literal>select-before-update=\"true\"</"
-"literal> in the <literal><class></literal> mapping, forcing Hibernate "
-"to <literal>SELECT</literal> the instance to ensure that changes did occur "
-"before updating the row."
-msgstr ""
-"在以上所有场景中,不管是专门设置一个版本/时间戳列,还是进行全部字段/脏数据字"
-"段比较,Hibernate 都会针对每个实体对象发送一条 <literal>UPDATE</literal>(带"
-"有相应的 <literal>WHERE</literal> 语句 )的 SQL 语句来执行版本检查和数据更"
-"新。如果你对关联实体 设置级联关系使用传播性持久化(transitive persistence),"
-"那么 Hibernate 可能会执行不必 要的update语句。这通常不是个问题,但是数据库里"
-"面对 <emphasis>on update</emphasis> 点火 的触发器可能在脱管对象没有任何更改的"
-"情况下被触发。因此,你可以在 <literal><class></literal> 的映射中,通过"
-"设置<literal>select-before-update=\"true\"</literal> 来定制这一行为,强制 "
-"Hibernate <literal>SELECT</literal> 这个对象实例,从而保证,在更新记录之前,"
-"对象的确是被修改过。 "
+msgid "In both cases, with dedicated version/timestamp columns or with a full/dirty field comparison, Hibernate uses a single <literal>UPDATE</literal> statement, with an appropriate <literal>WHERE</literal> clause, per entity to execute the version check and update the information. If you use transitive persistence to cascade reattachment to associated entities, Hibernate may execute unnecessary updates. This is usually not a problem, but <emphasis>on update</emphasis> triggers in the database might be executed even when no changes have been made to detached instances. You can customize this behavior by setting <literal>select-before-update=\"true\"</literal> in the <literal><class></literal> mapping, forcing Hibernate to <literal>SELECT</literal> the instance to ensure that changes did occur before updating the row."
+msgstr "在以上所有场景中,不管是专门设置一个版本/时间戳列,还是进行全部字段/脏数据字段比较,Hibernate 都会针对每个实体对象发送一条 <literal>UPDATE</literal>(带有相应的 <literal>WHERE</literal> 语句 )的 SQL 语句来执行版本检查和数据更新。如果你对关联实体 设置级联关系使用传播性持久化(transitive persistence),那么 Hibernate 可能会执行不必 要的update语句。这通常不是个问题,但是数据库里面对 <emphasis>on update</emphasis> 点火 的触发器可能在脱管对象没有任何更改的情况下被触发。因此,你可以在 <literal><class></literal> 的映射中,通过设置<literal>select-before-update=\"true\"</literal> 来定制这一行为,强制 Hibernate <literal>SELECT</literal> 这个对象实例,从而保证,在更新记录之前,对象的确是被修改过。 "
#. Tag: title
#, no-c-format
@@ -1384,103 +536,53 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is not intended that users spend much time worrying about locking "
-"strategies. It is usually enough to specify an isolation level for the JDBC "
-"connections and then simply let the database do all the work. However, "
-"advanced users may wish to obtain exclusive pessimistic locks or re-obtain "
-"locks at the start of a new transaction."
-msgstr ""
-"用户其实并不需要花很多精力去担心锁定策略的问题。通常情况下,只要为 JDBC 连接"
-"指定一下隔离级别,然后让数据库去搞定一切就够了。然而,高级用户有时候希望进行"
-"一个排它的悲观锁定,或者在一个新的事务启动的时候,重新进行锁定。 "
+msgid "It is not intended that users spend much time worrying about locking strategies. It is usually enough to specify an isolation level for the JDBC connections and then simply let the database do all the work. However, advanced users may wish to obtain exclusive pessimistic locks or re-obtain locks at the start of a new transaction."
+msgstr "用户其实并不需要花很多精力去担心锁定策略的问题。通常情况下,只要为 JDBC 连接指定一下隔离级别,然后让数据库去搞定一切就够了。然而,高级用户有时候希望进行一个排它的悲观锁定,或者在一个新的事务启动的时候,重新进行锁定。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate will always use the locking mechanism of the database; it never "
-"lock objects in memory."
+msgid "Hibernate will always use the locking mechanism of the database; it never lock objects in memory."
msgstr "Hibernate 总是使用数据库的锁定机制,从不在内存中锁定对象。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>LockMode</literal> class defines the different lock levels that "
-"can be acquired by Hibernate. A lock is obtained by the following mechanisms:"
-msgstr ""
-"类 <literal>LockMode</literal> 定义了 Hibernate 所需的不同的锁定级别。一个锁"
-"定可以通过以下的机制来设置:"
+msgid "The <literal>LockMode</literal> class defines the different lock levels that can be acquired by Hibernate. A lock is obtained by the following mechanisms:"
+msgstr "类 <literal>LockMode</literal> 定义了 Hibernate 所需的不同的锁定级别。一个锁定可以通过以下的机制来设置:"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockMode.WRITE</literal> is acquired automatically when Hibernate "
-"updates or inserts a row."
-msgstr ""
-"当 Hibernate 更新或者插入一行记录的时候,锁定级别自动设置为 "
-"<literal>LockMode.WRITE</literal>。"
+msgid "<literal>LockMode.WRITE</literal> is acquired automatically when Hibernate updates or inserts a row."
+msgstr "当 Hibernate 更新或者插入一行记录的时候,锁定级别自动设置为 <literal>LockMode.WRITE</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockMode.UPGRADE</literal> can be acquired upon explicit user "
-"request using <literal>SELECT ... FOR UPDATE</literal> on databases which "
-"support that syntax."
-msgstr ""
-"当用户显式的使用数据库支持的 SQL 格式 <literal>SELECT ... FOR UPDATE</"
-"literal> 发送 SQL 的时候,锁定级别设置为 <literal>LockMode.UPGRADE</"
-"literal>。 "
+msgid "<literal>LockMode.UPGRADE</literal> can be acquired upon explicit user request using <literal>SELECT ... FOR UPDATE</literal> on databases which support that syntax."
+msgstr "当用户显式的使用数据库支持的 SQL 格式 <literal>SELECT ... FOR UPDATE</literal> 发送 SQL 的时候,锁定级别设置为 <literal>LockMode.UPGRADE</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockMode.UPGRADE_NOWAIT</literal> can be acquired upon explicit "
-"user request using a <literal>SELECT ... FOR UPDATE NOWAIT</literal> under "
-"Oracle."
-msgstr ""
-"当用户显式的使用 Oracle 数据库的 SQL 语句 <literal>SELECT ... FOR UPDATE "
-"NOWAIT</literal> 的时候,锁定级别设置 <literal>LockMode.UPGRADE_NOWAIT</"
-"literal>。 "
+msgid "<literal>LockMode.UPGRADE_NOWAIT</literal> can be acquired upon explicit user request using a <literal>SELECT ... FOR UPDATE NOWAIT</literal> under Oracle."
+msgstr "当用户显式的使用 Oracle 数据库的 SQL 语句 <literal>SELECT ... FOR UPDATE NOWAIT</literal> 的时候,锁定级别设置 <literal>LockMode.UPGRADE_NOWAIT</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockMode.READ</literal> is acquired automatically when Hibernate "
-"reads data under Repeatable Read or Serializable isolation level. It can be "
-"re-acquired by explicit user request."
-msgstr ""
-"当 Hibernate 在“可重复读”或者是“序列化”数据库隔离级别下读取数据的时候,锁定模"
-"式自动设置为 <literal>LockMode.READ</literal>。这种模式也可以通过用户显式指定"
-"进行设置。 "
+msgid "<literal>LockMode.READ</literal> is acquired automatically when Hibernate reads data under Repeatable Read or Serializable isolation level. It can be re-acquired by explicit user request."
+msgstr "当 Hibernate 在“可重复读”或者是“序列化”数据库隔离级别下读取数据的时候,锁定模式自动设置为 <literal>LockMode.READ</literal>。这种模式也可以通过用户显式指定进行设置。 "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>LockMode.NONE</literal> represents the absence of a lock. All "
-"objects switch to this lock mode at the end of a <literal>Transaction</"
-"literal>. Objects associated with the session via a call to <literal>update()"
-"</literal> or <literal>saveOrUpdate()</literal> also start out in this lock "
-"mode."
-msgstr ""
-"<literal>LockMode.NONE</literal> 代表无需锁定。在 <literal>Transaction</"
-"literal> 结束时, 所有的对象都切换到该模式上来。与 session 相关联的对象通过调"
-"用 <literal>update()</literal> 或者 <literal>saveOrUpdate()</literal> 脱离该"
-"模式。"
+msgid "<literal>LockMode.NONE</literal> represents the absence of a lock. All objects switch to this lock mode at the end of a <literal>Transaction</literal>. Objects associated with the session via a call to <literal>update()</literal> or <literal>saveOrUpdate()</literal> also start out in this lock mode."
+msgstr "<literal>LockMode.NONE</literal> 代表无需锁定。在 <literal>Transaction</literal> 结束时, 所有的对象都切换到该模式上来。与 session 相关联的对象通过调用 <literal>update()</literal> 或者 <literal>saveOrUpdate()</literal> 脱离该模式。"
#. Tag: para
#, no-c-format
-msgid ""
-"The \"explicit user request\" is expressed in one of the following ways:"
+msgid "The \"explicit user request\" is expressed in one of the following ways:"
msgstr "\"显式的用户指定\"可以通过以下几种方式之一来表示:"
#. Tag: para
#, no-c-format
-msgid ""
-"A call to <literal>Session.load()</literal>, specifying a <literal>LockMode</"
-"literal>."
-msgstr ""
-"调用 <literal>Session.load()</literal> 的时候指定<literal>锁定模式"
-"(LockMode)</literal>。"
+msgid "A call to <literal>Session.load()</literal>, specifying a <literal>LockMode</literal>."
+msgstr "调用 <literal>Session.load()</literal> 的时候指定<literal>锁定模式(LockMode)</literal>。"
#. Tag: para
#, no-c-format
@@ -1494,46 +596,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If <literal>Session.load()</literal> is called with <literal>UPGRADE</"
-"literal> or <literal>UPGRADE_NOWAIT</literal>, and the requested object was "
-"not yet loaded by the session, the object is loaded using "
-"<literal>SELECT ... FOR UPDATE</literal>. If <literal>load()</literal> is "
-"called for an object that is already loaded with a less restrictive lock "
-"than the one requested, Hibernate calls <literal>lock()</literal> for that "
-"object."
-msgstr ""
-"如果在 <literal>UPGRADE</literal> 或者 <literal>UPGRADE_NOWAIT</literal> 锁定"
-"模式下调用 <literal>Session.load()</literal>,并且要读取的对象尚未被 session "
-"载入过,那么对象通过 <literal>SELECT ... FOR UPDATE</literal> 这样的 SQL 语句"
-"被载入。如果为一个对象调用 <literal>load()</literal> 方法时,该对象已经在另"
-"一个较少限制的锁定模式下被载入了,那么 Hibernate 就对该对象调用 "
-"<literal>lock()</literal> 方法。"
+msgid "If <literal>Session.load()</literal> is called with <literal>UPGRADE</literal> or <literal>UPGRADE_NOWAIT</literal>, and the requested object was not yet loaded by the session, the object is loaded using <literal>SELECT ... FOR UPDATE</literal>. If <literal>load()</literal> is called for an object that is already loaded with a less restrictive lock than the one requested, Hibernate calls <literal>lock()</literal> for that object."
+msgstr "如果在 <literal>UPGRADE</literal> 或者 <literal>UPGRADE_NOWAIT</literal> 锁定模式下调用 <literal>Session.load()</literal>,并且要读取的对象尚未被 session 载入过,那么对象通过 <literal>SELECT ... FOR UPDATE</literal> 这样的 SQL 语句被载入。如果为一个对象调用 <literal>load()</literal> 方法时,该对象已经在另一个较少限制的锁定模式下被载入了,那么 Hibernate 就对该对象调用 <literal>lock()</literal> 方法。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>Session.lock()</literal> performs a version number check if the "
-"specified lock mode is <literal>READ</literal>, <literal>UPGRADE</literal> "
-"or <literal>UPGRADE_NOWAIT</literal>. In the case of <literal>UPGRADE</"
-"literal> or <literal>UPGRADE_NOWAIT</literal>, <literal>SELECT ... FOR "
-"UPDATE</literal> is used."
-msgstr ""
-"如果指定的锁定模式是 <literal>READ</literal>,<literal>UPGRADE</literal> 或 "
-"<literal>UPGRADE_NOWAIT</literal>,那么 <literal>Session.lock()</literal> 就"
-"执行版本号检查。(在 <literal>UPGRADE</literal> 或者 "
-"<literal>UPGRADE_NOWAIT</literal> 锁定模式下,执行 <literal>SELECT ... FOR "
-"UPDATE</literal>这样的SQL语句。)"
+msgid "<literal>Session.lock()</literal> performs a version number check if the specified lock mode is <literal>READ</literal>, <literal>UPGRADE</literal> or <literal>UPGRADE_NOWAIT</literal>. In the case of <literal>UPGRADE</literal> or <literal>UPGRADE_NOWAIT</literal>, <literal>SELECT ... FOR UPDATE</literal> is used."
+msgstr "如果指定的锁定模式是 <literal>READ</literal>,<literal>UPGRADE</literal> 或 <literal>UPGRADE_NOWAIT</literal>,那么 <literal>Session.lock()</literal> 就执行版本号检查。(在 <literal>UPGRADE</literal> 或者 <literal>UPGRADE_NOWAIT</literal> 锁定模式下,执行 <literal>SELECT ... FOR UPDATE</literal>这样的SQL语句。)"
#. Tag: para
#, no-c-format
-msgid ""
-"If the requested lock mode is not supported by the database, Hibernate uses "
-"an appropriate alternate mode instead of throwing an exception. This ensures "
-"that applications are portable."
-msgstr ""
-"如果数据库不支持用户设置的锁定模式,Hibernate 将使用适当的替代模式(而不是扔"
-"出异常)。这一点可以确保应用程序的可移植性。 "
+msgid "If the requested lock mode is not supported by the database, Hibernate uses an appropriate alternate mode instead of throwing an exception. This ensures that applications are portable."
+msgstr "如果数据库不支持用户设置的锁定模式,Hibernate 将使用适当的替代模式(而不是扔出异常)。这一点可以确保应用程序的可移植性。 "
#. Tag: title
#, no-c-format
@@ -1542,133 +616,51 @@
#. Tag: para
#, no-c-format
-msgid ""
-"One of the legacies of Hibernate 2.x JDBC connection management meant that a "
-"<literal>Session</literal> would obtain a connection when it was first "
-"required and then maintain that connection until the session was closed. "
-"Hibernate 3.x introduced the notion of connection release modes that would "
-"instruct a session how to handle its JDBC connections. The following "
-"discussion is pertinent only to connections provided through a configured "
-"<literal>ConnectionProvider</literal>. User-supplied connections are outside "
-"the breadth of this discussion. The different release modes are identified "
-"by the enumerated values of <literal>org.hibernate.ConnectionReleaseMode</"
-"literal>:"
-msgstr ""
-"Hibernate 关于 JDBC 连接管理的旧(2.x)行为是,<literal>Session</literal> 在"
-"第一次需要的时候获取一个连接,在 session 关闭之前一直会持有这个连接。"
-"Hibernate 引入了连接释放的概念,来告诉 session 如何处理它的 JDBC 连接。注意,"
-"下面的讨论只适用于采用配置 <literal>ConnectionProvider</literal> 来提供连接的"
-"情况,用户自己提供的连接与这里的讨论无关。通过 <literal>org.hibernate."
-"ConnectionReleaseMode</literal> 的不同枚举值来使用不用的释放模式:"
+msgid "One of the legacies of Hibernate 2.x JDBC connection management meant that a <literal>Session</literal> would obtain a connection when it was first required and then maintain that connection until the session was closed. Hibernate 3.x introduced the notion of connection release modes that would instruct a session how to handle its JDBC connections. The following discussion is pertinent only to connections provided through a configured <literal>ConnectionProvider</literal>. User-supplied connections are outside the breadth of this discussion. The different release modes are identified by the enumerated values of <literal>org.hibernate.ConnectionReleaseMode</literal>:"
+msgstr "Hibernate 关于 JDBC 连接管理的旧(2.x)行为是,<literal>Session</literal> 在第一次需要的时候获取一个连接,在 session 关闭之前一直会持有这个连接。Hibernate 引入了连接释放的概念,来告诉 session 如何处理它的 JDBC 连接。注意,下面的讨论只适用于采用配置 <literal>ConnectionProvider</literal> 来提供连接的情况,用户自己提供的连接与这里的讨论无关。通过 <literal>org.hibernate.ConnectionReleaseMode</literal> 的不同枚举值来使用不用的释放模式:"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>ON_CLOSE</literal>: is the legacy behavior described above. The "
-"Hibernate session obtains a connection when it first needs to perform some "
-"JDBC access and maintains that connection until the session is closed."
-msgstr ""
-"<literal>ON_CLOSE</literal>:基本上就是上面提到的老式行为。Hibernate session "
-"在第一次需要进行 JDBC 操作的时候获取连接,然后持有它,直到 session 关闭。"
+msgid "<literal>ON_CLOSE</literal>: is the legacy behavior described above. The Hibernate session obtains a connection when it first needs to perform some JDBC access and maintains that connection until the session is closed."
+msgstr "<literal>ON_CLOSE</literal>:基本上就是上面提到的老式行为。Hibernate session 在第一次需要进行 JDBC 操作的时候获取连接,然后持有它,直到 session 关闭。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>AFTER_TRANSACTION</literal>: releases connections after a "
-"<literal>org.hibernate.Transaction</literal> has been completed."
-msgstr ""
-"<literal>AFTER_TRANSACTION</literal>:在 <literal>org.hibernate.Transaction</"
-"literal> 结束后释放连接。"
+msgid "<literal>AFTER_TRANSACTION</literal>: releases connections after a <literal>org.hibernate.Transaction</literal> has been completed."
+msgstr "<literal>AFTER_TRANSACTION</literal>:在 <literal>org.hibernate.Transaction</literal> 结束后释放连接。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>AFTER_STATEMENT</literal> (also referred to as aggressive release): "
-"releases connections after every statement execution. This aggressive "
-"releasing is skipped if that statement leaves open resources associated with "
-"the given session. Currently the only situation where this occurs is through "
-"the use of <literal>org.hibernate.ScrollableResults</literal>."
-msgstr ""
-"<literal>AFTER_STATEMENT</literal>(也被称做积极释放):在每一条语句被执行后"
-"就释放连接。但假若语句留下了与 session 相关的资源,那就不会被释放。目前唯一的"
-"这种情形就是使用 <literal>org.hibernate.ScrollableResults</literal>。"
+msgid "<literal>AFTER_STATEMENT</literal> (also referred to as aggressive release): releases connections after every statement execution. This aggressive releasing is skipped if that statement leaves open resources associated with the given session. Currently the only situation where this occurs is through the use of <literal>org.hibernate.ScrollableResults</literal>."
+msgstr "<literal>AFTER_STATEMENT</literal>(也被称做积极释放):在每一条语句被执行后就释放连接。但假若语句留下了与 session 相关的资源,那就不会被释放。目前唯一的这种情形就是使用 <literal>org.hibernate.ScrollableResults</literal>。"
#. Tag: para
#, no-c-format
-msgid ""
-"The configuration parameter <literal>hibernate.connection.release_mode</"
-"literal> is used to specify which release mode to use. The possible values "
-"are as follows:"
-msgstr ""
-"<literal>hibernate.connection.release_mode</literal> 配置参数用来指定使用哪一"
-"种释放模式。可能的值有: "
+msgid "The configuration parameter <literal>hibernate.connection.release_mode</literal> is used to specify which release mode to use. The possible values are as follows:"
+msgstr "<literal>hibernate.connection.release_mode</literal> 配置参数用来指定使用哪一种释放模式。可能的值有: "
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>auto</literal> (the default): this choice delegates to the release "
-"mode returned by the <literal>org.hibernate.transaction.TransactionFactory."
-"getDefaultReleaseMode()</literal> method. For JTATransactionFactory, this "
-"returns ConnectionReleaseMode.AFTER_STATEMENT; for JDBCTransactionFactory, "
-"this returns ConnectionReleaseMode.AFTER_TRANSACTION. Do not change this "
-"default behavior as failures due to the value of this setting tend to "
-"indicate bugs and/or invalid assumptions in user code."
-msgstr ""
-"<literal>auto</literal>(默认):这一选择把释放模式委派给 <literal>org."
-"hibernate.transaction.TransactionFactory.getDefaultReleaseMode()</literal> 方"
-"法。对 JTATransactionFactory 来说,它会返回 ConnectionReleaseMode."
-"AFTER_STATEMENT;对 JDBCTransactionFactory 来说,则是ConnectionReleaseMode."
-"AFTER_TRANSACTION。很少需要修改这一默认行为,因为假若设置不当,就会带来 bug,"
-"或者给用户代码带来误导。"
+msgid "<literal>auto</literal> (the default): this choice delegates to the release mode returned by the <literal>org.hibernate.transaction.TransactionFactory.getDefaultReleaseMode()</literal> method. For JTATransactionFactory, this returns ConnectionReleaseMode.AFTER_STATEMENT; for JDBCTransactionFactory, this returns ConnectionReleaseMode.AFTER_TRANSACTION. Do not change this default behavior as failures due to the value of this setting tend to indicate bugs and/or invalid assumptions in user code."
+msgstr "<literal>auto</literal>(默认):这一选择把释放模式委派给 <literal>org.hibernate.transaction.TransactionFactory.getDefaultReleaseMode()</literal> 方法。对 JTATransactionFactory 来说,它会返回 ConnectionReleaseMode.AFTER_STATEMENT;对 JDBCTransactionFactory 来说,则是ConnectionReleaseMode.AFTER_TRANSACTION。很少需要修改这一默认行为,因为假若设置不当,就会带来 bug,或者给用户代码带来误导。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>on_close</literal>: uses ConnectionReleaseMode.ON_CLOSE. This "
-"setting is left for backwards compatibility, but its use is discouraged."
-msgstr ""
-"<literal>on_close</literal>:使用 ConnectionReleaseMode.ON_CLOSE。这种方式是"
-"为了向下兼容的,但是已经完全不被鼓励使用了。"
+msgid "<literal>on_close</literal>: uses ConnectionReleaseMode.ON_CLOSE. This setting is left for backwards compatibility, but its use is discouraged."
+msgstr "<literal>on_close</literal>:使用 ConnectionReleaseMode.ON_CLOSE。这种方式是为了向下兼容的,但是已经完全不被鼓励使用了。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>after_transaction</literal>: uses ConnectionReleaseMode."
-"AFTER_TRANSACTION. This setting should not be used in JTA environments. Also "
-"note that with ConnectionReleaseMode.AFTER_TRANSACTION, if a session is "
-"considered to be in auto-commit mode, connections will be released as if the "
-"release mode were AFTER_STATEMENT."
-msgstr ""
-"<literal>after_transaction</literal>:使用 ConnectionReleaseMode."
-"AFTER_TRANSACTION。这一设置不应该在 JTA 环境下使用。也要注意,使用 "
-"ConnectionReleaseMode.AFTER_TRANSACTION 的时候,假若session 处于 auto-commit "
-"状态,连接会像 AFTER_STATEMENT 那样被释放。"
+msgid "<literal>after_transaction</literal>: uses ConnectionReleaseMode.AFTER_TRANSACTION. This setting should not be used in JTA environments. Also note that with ConnectionReleaseMode.AFTER_TRANSACTION, if a session is considered to be in auto-commit mode, connections will be released as if the release mode were AFTER_STATEMENT."
+msgstr "<literal>after_transaction</literal>:使用 ConnectionReleaseMode.AFTER_TRANSACTION。这一设置不应该在 JTA 环境下使用。也要注意,使用 ConnectionReleaseMode.AFTER_TRANSACTION 的时候,假若session 处于 auto-commit 状态,连接会像 AFTER_STATEMENT 那样被释放。"
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>after_statement</literal>: uses ConnectionReleaseMode."
-"AFTER_STATEMENT. Additionally, the configured <literal>ConnectionProvider</"
-"literal> is consulted to see if it supports this setting "
-"(<literal>supportsAggressiveRelease()</literal>). If not, the release mode "
-"is reset to ConnectionReleaseMode.AFTER_TRANSACTION. This setting is only "
-"safe in environments where we can either re-acquire the same underlying JDBC "
-"connection each time you make a call into <literal>ConnectionProvider."
-"getConnection()</literal> or in auto-commit environments where it does not "
-"matter if we re-establish the same connection."
-msgstr ""
-"<literal>after_statement</literal>:使用 ConnectionReleaseMode."
-"AFTER_STATEMENT。除此之外,会查询配置的 <literal>ConnectionProvider</"
-"literal>,是否它支持这一设置(<literal>supportsAggressiveRelease()</"
-"literal>)。假若不支持,释放模式会被设置为 ConnectionReleaseMode."
-"AFTER_TRANSACTION。只有在你每次调用 <literal>ConnectionProvider."
-"getConnection()</literal> 获取底层 JDBC 连接的时候,都可以确信获得同一个连接"
-"的时候,这一设置才是安全的;或者在 auto-commit 环境中,你可以不管是否每次都获"
-"得同一个连接的时候,这才是安全的。"
+msgid "<literal>after_statement</literal>: uses ConnectionReleaseMode.AFTER_STATEMENT. Additionally, the configured <literal>ConnectionProvider</literal> is consulted to see if it supports this setting (<literal>supportsAggressiveRelease()</literal>). If not, the release mode is reset to ConnectionReleaseMode.AFTER_TRANSACTION. This setting is only safe in environments where we can either re-acquire the same underlying JDBC connection each time you make a call into <literal>ConnectionProvider.getConnection()</literal> or in auto-commit environments where it does not matter if we re-establish the same connection."
+msgstr "<literal>after_statement</literal>:使用 ConnectionReleaseMode.AFTER_STATEMENT。除此之外,会查询配置的 <literal>ConnectionProvider</literal>,是否它支持这一设置(<literal>supportsAggressiveRelease()</literal>)。假若不支持,释放模式会被设置为 ConnectionReleaseMode.AFTER_TRANSACTION。只有在你每次调用 <literal>ConnectionProvider.getConnection()</literal> 获取底层 JDBC 连接的时候,都可以确信获得同一个连接的时候,这一设置才是安全的;或者在 auto-commit 环境中,你可以不管是否每次都获得同一个连接的时候,这才是安全的。"
#~ msgid "foo==bar"
#~ msgstr "foo==bar"
-
#~ msgid ""
#~ "<![CDATA[// Non-managed environment idiom\n"
#~ "Session sess = factory.openSession();\n"
@@ -1707,7 +699,6 @@
#~ "finally {\n"
#~ " sess.close();\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[// Non-managed environment idiom with getCurrentSession()\n"
#~ "try {\n"
@@ -1736,7 +727,6 @@
#~ " factory.getCurrentSession().getTransaction().rollback();\n"
#~ " throw e; // or display error message\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[// BMT idiom\n"
#~ "Session sess = factory.openSession();\n"
@@ -1775,7 +765,6 @@
#~ "finally {\n"
#~ " sess.close();\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[// BMT idiom with getCurrentSession()\n"
#~ "try {\n"
@@ -1812,7 +801,6 @@
#~ " tx.rollback();\n"
#~ " throw e; // or display error message\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[// CMT idiom\n"
#~ " Session sess = factory.getCurrentSession();\n"
@@ -1827,7 +815,6 @@
#~ " // do some work\n"
#~ " ...\n"
#~ "]]>"
-
#~ msgid ""
#~ "<![CDATA[\n"
#~ "Session sess = factory.openSession();\n"
@@ -1868,7 +855,6 @@
#~ "finally {\n"
#~ " sess.close();\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[// foo is an instance loaded by a previous Session\n"
#~ "session = factory.openSession();\n"
@@ -1895,7 +881,6 @@
#~ "\n"
#~ "t.commit();\n"
#~ "session.close();]]>"
-
#~ msgid ""
#~ "<![CDATA[// foo is an instance loaded earlier by the old session\n"
#~ "Transaction t = session.beginTransaction(); // Obtain a new JDBC "
@@ -1916,7 +901,6 @@
#~ "session.flush(); // Only for last transaction in conversation\n"
#~ "t.commit(); // Also return JDBC connection\n"
#~ "session.close(); // Only for last transaction in conversation]]>"
-
#~ msgid ""
#~ "<![CDATA[// foo is an instance loaded by a previous Session\n"
#~ "foo.setProperty(\"bar\");\n"
@@ -1935,3 +919,4 @@
#~ "loaded already\n"
#~ "t.commit();\n"
#~ "session.close();]]>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/tutorial.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/tutorial.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/tutorial.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -7,7 +7,7 @@
"Project-Id-Version: tutorial\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-11T05:38:16\n"
-"PO-Revision-Date: 2010-01-11 10:56+1000\n"
+"PO-Revision-Date: 2010-03-16 09:56+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -22,34 +22,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Intended for new users, this chapter provides an step-by-step introduction "
-"to Hibernate, starting with a simple application using an in-memory "
-"database. The tutorial is based on an earlier tutorial developed by Michael "
-"Gloegl. All code is contained in the <filename>tutorials/web</filename> "
-"directory of the project source."
-msgstr ""
-"面向新用户,从一个简单的使用内存数据库的例子开始,本章提供对 Hibernate 的逐步"
-"介绍。本教程基于 Michael Gloegl 早期编写的手册。所有代码都包含在 "
-"<filename>tutorials/web</filename> 目录下。"
+msgid "Intended for new users, this chapter provides an step-by-step introduction to Hibernate, starting with a simple application using an in-memory database. The tutorial is based on an earlier tutorial developed by Michael Gloegl. All code is contained in the <filename>tutorials/web</filename> directory of the project source."
+msgstr "面向新用户,从一个简单的使用内存数据库的例子开始,本章提供对 Hibernate 的逐步介绍。本教程基于 Michael Gloegl 早期编写的手册。所有代码都包含在 <filename>tutorials/web</filename> 目录下。"
#. Tag: para
#, no-c-format
-msgid ""
-"This tutorial expects the user have knowledge of both Java and SQL. If you "
-"have a limited knowledge of JAVA or SQL, it is advised that you start with a "
-"good introduction to that technology prior to attempting to learn Hibernate."
-msgstr ""
-"本教程期望用户具备 Java 和 SQL 知识。如果你这方面的知识有限,我们建议你在学"
-"习 Hibernate 之前先好好了解这些技术。"
+msgid "This tutorial expects the user have knowledge of both Java and SQL. If you have a limited knowledge of JAVA or SQL, it is advised that you start with a good introduction to that technology prior to attempting to learn Hibernate."
+msgstr "本教程期望用户具备 Java 和 SQL 知识。如果你这方面的知识有限,我们建议你在学习 Hibernate 之前先好好了解这些技术。"
#. Tag: para
#, no-c-format
-msgid ""
-"The distribution contains another example application under the "
-"<filename>tutorial/eg</filename> project source directory."
-msgstr ""
-"本版本在源代码目录 <filename>tutorial/eg</filename> 下还包含另外一个例程。"
+msgid "The distribution contains another example application under the <filename>tutorial/eg</filename> project source directory."
+msgstr "本版本在源代码目录 <filename>tutorial/eg</filename> 下还包含另外一个例程。"
#. Tag: title
#, no-c-format
@@ -58,25 +42,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"For this example, we will set up a small database application that can store "
-"events we want to attend and information about the host(s) of these events."
-msgstr ""
-"在这个例子里,我们将设立一个小应用程序可以保存我们希望参加的活动(events)和"
-"这些活动主办方的相关信息。(译者注:在本教程的后面部分,我们将直接使用 event "
-"而不是它的中文翻译“活动”,以免混淆。)"
+msgid "For this example, we will set up a small database application that can store events we want to attend and information about the host(s) of these events."
+msgstr "在这个例子里,我们将设立一个小应用程序可以保存我们希望参加的活动(events)和这些活动主办方的相关信息。(译者注:在本教程的后面部分,我们将直接使用 event 而不是它的中文翻译“活动”,以免混淆。)"
#. Tag: para
#, no-c-format
-msgid ""
-"Although you can use whatever database you feel comfortable using, we will "
-"use <ulink url=\"http://hsqldb.org/\">HSQLDB</ulink> (an in-memory, Java "
-"database) to avoid describing installation/setup of any particular database "
-"servers."
-msgstr ""
-"虽然你可以使用任何数据库,我们还是用 <ulink url=\"http://hsqldb.org/"
-"\">HSQLDB</ulink>(一个用 Java 编写的内存数据库)来避免花费篇章对数据库服务器"
-"的安装/配置进行解释。"
+msgid "Although you can use whatever database you feel comfortable using, we will use <ulink url=\"http://hsqldb.org/\">HSQLDB</ulink> (an in-memory, Java database) to avoid describing installation/setup of any particular database servers."
+msgstr "虽然你可以使用任何数据库,我们还是用 <ulink url=\"http://hsqldb.org/\">HSQLDB</ulink>(一个用 Java 编写的内存数据库)来避免花费篇章对数据库服务器的安装/配置进行解释。"
#. Tag: title
#, no-c-format
@@ -85,65 +57,22 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The first thing we need to do is to set up the development environment. We "
-"will be using the \"standard layout\" advocated by alot of build tools such "
-"as <ulink url=\"http://maven.org\">Maven</ulink>. Maven, in particular, has "
-"a good resource describing this <ulink url=\"http://maven.apache.org/guides/"
-"introduction/introduction-to-the-standard-directory-layout.html\">layout</"
-"ulink>. As this tutorial is to be a web application, we will be creating and "
-"making use of <filename>src/main/java</filename>, <filename>src/main/"
-"resources</filename> and <filename>src/main/webapp</filename> directories."
-msgstr ""
-"我们需要做的第一件事情是设置开发环境。我们将使用许多构建工具如 <ulink url="
-"\"http://maven.org\">Maven</ulink> 所鼓吹的“标准格式”。特别是 Maven,它的资源"
-"对这个<ulink url=\"http://maven.apache.org/guides/introduction/introduction-"
-"to-the-standard-directory-layout.html\">格式(layout)</ulink>有着很好的描"
-"述。因为本教程使用的是 web 应用程序,我么将创建和使用 <filename>src/main/"
-"java</filename>、<filename>src/main/resources</filename> 和 <filename>src/"
-"main/webapp</filename> 目录。"
+msgid "The first thing we need to do is to set up the development environment. We will be using the \"standard layout\" advocated by alot of build tools such as <ulink url=\"http://maven.org\">Maven</ulink>. Maven, in particular, has a good resource describing this <ulink url=\"http://maven.apache.org/guides/introduction/introduction-to-the-standard-...">layout</ulink>. As this tutorial is to be a web application, we will be creating and making use of <filename>src/main/java</filename>, <filename>src/main/resources</filename> and <filename>src/main/webapp</filename> directories."
+msgstr "我们需要做的第一件事情是设置开发环境。我们将使用许多构建工具如 <ulink url=\"http://maven.org\">Maven</ulink> 所鼓吹的“标准格式”。特别是 Maven,它的资源对这个<ulink url=\"http://maven.apache.org/guides/introduction/introduction-to-the-standard-...">格式(layout)</ulink>有着很好的描述。因为本教程使用的是 web 应用程序,我么将创建和使用 <filename>src/main/java</filename>、<filename>src/main/resources</filename> 和 <filename>src/main/webapp</filename> 目录。"
#. Tag: para
#, no-c-format
-msgid ""
-"We will be using Maven in this tutorial, taking advantage of its transitive "
-"dependency management capabilities as well as the ability of many IDEs to "
-"automatically set up a project for us based on the maven descriptor."
-msgstr ""
-"在本教程里我们将使用 Maven,利用其 transitive dependency 管理以及根据 Maven "
-"描述符用 IDE 自动设置项目的能力。"
+msgid "We will be using Maven in this tutorial, taking advantage of its transitive dependency management capabilities as well as the ability of many IDEs to automatically set up a project for us based on the maven descriptor."
+msgstr "在本教程里我们将使用 Maven,利用其 transitive dependency 管理以及根据 Maven 描述符用 IDE 自动设置项目的能力。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"It is not a requirement to use Maven. If you wish to use something else to "
-"build this tutoial (such as Ant), the layout will remain the same. The only "
-"change is that you will need to manually account for all the needed "
-"dependencies. If you use something like <ulink url=\"http://ant.apache.org/"
-"ivy/\">Ivy</ulink> providing transitive dependency management you would "
-"still use the dependencies mentioned below. Otherwise, you'd need to grab "
-"<emphasis>all</emphasis> dependencies, both explicit and transitive, and add "
-"them to the project's classpath. If working from the Hibernate distribution "
-"bundle, this would mean <filename>hibernate3.jar</filename>, all artifacts "
-"in the <filename>lib/required</filename> directory and all files from either "
-"the <filename>lib/bytecode/cglib</filename> or <filename>lib/bytecode/"
-"javassist</filename> directory; additionally you will need both the servlet-"
-"api jar and one of the slf4j logging backends."
-msgstr ""
-"Maven 并不要求这样。如果你希望使用其他技术来构建这个教程(如 Ant),格式将保"
-"持不变。唯一的改变是你将需要手工管理所有的依赖关系。如果你使用 <ulink url="
-"\"http://ant.apache.org/ivy/\">Ivy</ulink> 来提供 transitive dependency 管"
-"理,你将仍然下面提到的依赖关系。否则,你将需要找到所有的依赖关系(显性的和过"
-"渡的)并把它们添加到项目的 classpath 里。如果使用 Hibernate 捆绑版本,这意味"
-"着 <filename>hibernate3.jar</filename>、<filename>lib/required</filename> 目"
-"录下的所有 artifact 和 <filename>lib/bytecode/cglib</filename> 或 "
-"<filename>lib/bytecode/javassist</filename> 下的所有文件,此外你将需要 "
-"servlet-api jar 和一个 slf4j 日志后台文件。 "
+#, no-c-format
+msgid "It is not a requirement to use Maven. If you wish to use something else to build this tutoial (such as Ant), the layout will remain the same. The only change is that you will need to manually account for all the needed dependencies. If you use something like <ulink url=\"http://ant.apache.org/ivy/\">Ivy</ulink> providing transitive dependency management you would still use the dependencies mentioned below. Otherwise, you'd need to grab <emphasis>all</emphasis> dependencies, both explicit and transitive, and add them to the project's classpath. If working from the Hibernate distribution bundle, this would mean <filename>hibernate3.jar</filename>, all artifacts in the <filename>lib/required</filename> directory and all files from either the <filename>lib/bytecode/cglib</filename> or <filename>lib/bytecode/javassist</filename> directory; additionally you will need both the servlet-api jar and one of the slf4j logging backends."
+msgstr "Maven 并不要求这样。如果你希望使用其他技术来构建这个教程(如 Ant),格式将保持不变。唯一的改变是你将需要手工管理所有的依赖关系。如果你使用 <ulink url=\"http://ant.apache.org/ivy/\">Ivy</ulink> 来提供 transitive dependency 管理,你将仍然下面提到的依赖关系。否则,你将需要找到所有的依赖关系(显性的和过渡的)并把它们添加到项目的 classpath 里。如果使用 Hibernate 捆绑版本,这意味着 <filename>hibernate3.jar</filename>、<filename>lib/required</filename> 目录下的所有 artifact 和 <filename>lib/bytecode/cglib</filename> 或 <filename>lib/bytecode/javassist</filename> 下的所有文件,此外你将需要 servlet-api jar 和一个 slf4j 日志后台文件。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Save this file as <filename>pom.xml</filename> in the project root directory."
+msgid "Save this file as <filename>pom.xml</filename> in the project root directory."
msgstr "把这个文件保存为项目根目录下的 <filename>pom.xml</filename>。"
#. Tag: title
@@ -153,75 +82,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Next, we create a class that represents the event we want to store in the "
-"database; it is a simple JavaBean class with some properties:"
-msgstr ""
-"接下来我们创建一个类,用来代表那些我们希望储存在数据库里的 event,这是一个具"
-"有一些属性的简单 JavaBean 类:"
+msgid "Next, we create a class that represents the event we want to store in the database; it is a simple JavaBean class with some properties:"
+msgstr "接下来我们创建一个类,用来代表那些我们希望储存在数据库里的 event,这是一个具有一些属性的简单 JavaBean 类:"
#. Tag: para
#, no-c-format
-msgid ""
-"This class uses standard JavaBean naming conventions for property getter and "
-"setter methods, as well as private visibility for the fields. Although this "
-"is the recommended design, it is not required. Hibernate can also access "
-"fields directly, the benefit of accessor methods is robustness for "
-"refactoring."
-msgstr ""
-"你可以看到这个类对属性的存取方法(getter and setter method)使用了标准 "
-"JavaBean 命名约定,同时把类属性(field)的访问级别设成私有的(private)。这是"
-"推荐的设计,但并不是必须的。Hibernate 也可以直接访问这些 field,而使用访问方"
-"法(accessor method)的好处是提供了重构时的健壮性(robustness)。"
+msgid "This class uses standard JavaBean naming conventions for property getter and setter methods, as well as private visibility for the fields. Although this is the recommended design, it is not required. Hibernate can also access fields directly, the benefit of accessor methods is robustness for refactoring."
+msgstr "你可以看到这个类对属性的存取方法(getter and setter method)使用了标准 JavaBean 命名约定,同时把类属性(field)的访问级别设成私有的(private)。这是推荐的设计,但并不是必须的。Hibernate 也可以直接访问这些 field,而使用访问方法(accessor method)的好处是提供了重构时的健壮性(robustness)。"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>id</literal> property holds a unique identifier value for a "
-"particular event. All persistent entity classes (there are less important "
-"dependent classes as well) will need such an identifier property if we want "
-"to use the full feature set of Hibernate. In fact, most applications, "
-"especially web applications, need to distinguish objects by identifier, so "
-"you should consider this a feature rather than a limitation. However, we "
-"usually do not manipulate the identity of an object, hence the setter method "
-"should be private. Only Hibernate will assign identifiers when an object is "
-"saved. Hibernate can access public, private, and protected accessor methods, "
-"as well as public, private and protected fields directly. The choice is up "
-"to you and you can match it to fit your application design."
-msgstr ""
-"对一特定的 event, <literal>id</literal> 属性持有唯一的标识符(identifier)的"
-"值。如果我们希望使用 Hibernate 提供的所有特性,那么所有的持久化实体"
-"(persistent entity)类(这里也包括一些次要依赖类)都需要一个这样的标识符属"
-"性。而事实上,大多数应用程序(特别是 web 应用程序)都需要通过标识符来区别对"
-"象,所以你应该考虑使用标识符属性而不是把它当作一种限制。然而,我们通常不会操"
-"作对象的标识(identity),因此它的 setter 方法的访问级别应该声明 private。这"
-"样当对象被保存的时候,只有 Hibernate 可以为它分配标识符值。你可看到Hibernate"
-"可以直接访问 public,private 和 protected 的访问方法和 field。所以选择哪种方"
-"式完全取决于你,你可以使你的选择与你的应用程序设计相吻合。 "
+msgid "The <literal>id</literal> property holds a unique identifier value for a particular event. All persistent entity classes (there are less important dependent classes as well) will need such an identifier property if we want to use the full feature set of Hibernate. In fact, most applications, especially web applications, need to distinguish objects by identifier, so you should consider this a feature rather than a limitation. However, we usually do not manipulate the identity of an object, hence the setter method should be private. Only Hibernate will assign identifiers when an object is saved. Hibernate can access public, private, and protected accessor methods, as well as public, private and protected fields directly. The choice is up to you and you can match it to fit your application design."
+msgstr "对一特定的 event, <literal>id</literal> 属性持有唯一的标识符(identifier)的值。如果我们希望使用 Hibernate 提供的所有特性,那么所有的持久化实体(persistent entity)类(这里也包括一些次要依赖类)都需要一个这样的标识符属性。而事实上,大多数应用程序(特别是 web 应用程序)都需要通过标识符来区别对象,所以你应该考虑使用标识符属性而不是把它当作一种限制。然而,我们通常不会操作对象的标识(identity),因此它的 setter 方法的访问级别应该声明 private。这样当对象被保存的时候,只有 Hibernate 可以为它分配标识符值。你可看到Hibernate可以直接访问 public,private 和 protected 的访问方法和 field。所以选择哪种方式完全取决于你,你可以使你的选择与你的应用程序设计相吻合。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The no-argument constructor is a requirement for all persistent classes; "
-"Hibernate has to create objects for you, using Java Reflection. The "
-"constructor can be private, however package or public visibility is required "
-"for runtime proxy generation and efficient data retrieval without bytecode "
-"instrumentation."
-msgstr ""
-"所有的持久化类(persistent classes)都要求有无参的构造器,因为 Hibernate 必须"
-"使用 Java 反射机制来为你创建对象。构造器(constructor)的访问级别可以是 "
-"private,然而当生成运行时代理(runtime proxy)的时候则要求使用至少是 package "
-"级别的访问控制,这样在没有字节码指令(bytecode instrumentation)的情况下,从"
-"持久化类里获取数据会更有效率。 "
+msgid "The no-argument constructor is a requirement for all persistent classes; Hibernate has to create objects for you, using Java Reflection. The constructor can be private, however package or public visibility is required for runtime proxy generation and efficient data retrieval without bytecode instrumentation."
+msgstr "所有的持久化类(persistent classes)都要求有无参的构造器,因为 Hibernate 必须使用 Java 反射机制来为你创建对象。构造器(constructor)的访问级别可以是 private,然而当生成运行时代理(runtime proxy)的时候则要求使用至少是 package 级别的访问控制,这样在没有字节码指令(bytecode instrumentation)的情况下,从持久化类里获取数据会更有效率。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Save this file to the <filename>src/main/java/org/hibernate/tutorial/domain</"
-"filename> directory."
-msgstr ""
-"把这个文件保存到 <filename>src/main/java/org/hibernate/tutorial/domain</"
-"filename> 目录下。"
+msgid "Save this file to the <filename>src/main/java/org/hibernate/tutorial/domain</filename> directory."
+msgstr "把这个文件保存到 <filename>src/main/java/org/hibernate/tutorial/domain</filename> 目录下。"
#. Tag: title
#, no-c-format
@@ -230,15 +112,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate needs to know how to load and store objects of the persistent "
-"class. This is where the Hibernate mapping file comes into play. The mapping "
-"file tells Hibernate what table in the database it has to access, and what "
-"columns in that table it should use."
-msgstr ""
-"Hibernate 需要知道怎样去加载(load)和存储(store)持久化类的对象。这正是 "
-"Hibernate 映射文件发挥作用的地方。映射文件告诉 Hibernate 它应该访问数据库"
-"(database)里面的哪个表(table)及应该使用表里面的哪些字段(column)。"
+msgid "Hibernate needs to know how to load and store objects of the persistent class. This is where the Hibernate mapping file comes into play. The mapping file tells Hibernate what table in the database it has to access, and what columns in that table it should use."
+msgstr "Hibernate 需要知道怎样去加载(load)和存储(store)持久化类的对象。这正是 Hibernate 映射文件发挥作用的地方。映射文件告诉 Hibernate 它应该访问数据库(database)里面的哪个表(table)及应该使用表里面的哪些字段(column)。"
#. Tag: para
#, no-c-format
@@ -247,194 +122,68 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate DTD is sophisticated. You can use it for auto-completion of XML "
-"mapping elements and attributes in your editor or IDE. Opening up the DTD "
-"file in your text editor is the easiest way to get an overview of all "
-"elements and attributes, and to view the defaults, as well as some comments. "
-"Hibernate will not load the DTD file from the web, but first look it up from "
-"the classpath of the application. The DTD file is included in "
-"<filename>hibernate-core.jar</filename> (it is also included in the "
-"<filename>hibernate3.jar</filename>, if using the distribution bundle)."
-msgstr ""
-"注意 Hibernate 的 DTD 是非常复杂的。你的编辑器或者 IDE 里使用它来自动完成那些"
-"用来映射的 XML 元素(element)和属性(attribute)。你也可以在文本编辑器里打"
-"开 DTD — 这是最简单的方式来概览所有的元素和 attribute,并查看它们的缺省值以及"
-"注释。注意 Hibernate 不会从 web 加载 DTD 文件,但它会首先在应用程序的 "
-"classpath 中查找。DTD 文件已包括在 <literal>hibernate3.jar</literal> 里,同时"
-"也在 Hibernate 发布包的 <literal>src/</literal> 目录下。 "
+msgid "Hibernate DTD is sophisticated. You can use it for auto-completion of XML mapping elements and attributes in your editor or IDE. Opening up the DTD file in your text editor is the easiest way to get an overview of all elements and attributes, and to view the defaults, as well as some comments. Hibernate will not load the DTD file from the web, but first look it up from the classpath of the application. The DTD file is included in <filename>hibernate-core.jar</filename> (it is also included in the <filename>hibernate3.jar</filename>, if using the distribution bundle)."
+msgstr "注意 Hibernate 的 DTD 是非常复杂的。你的编辑器或者 IDE 里使用它来自动完成那些用来映射的 XML 元素(element)和属性(attribute)。你也可以在文本编辑器里打开 DTD — 这是最简单的方式来概览所有的元素和 attribute,并查看它们的缺省值以及注释。注意 Hibernate 不会从 web 加载 DTD 文件,但它会首先在应用程序的 classpath 中查找。DTD 文件已包括在 <literal>hibernate3.jar</literal> 里,同时也在 Hibernate 发布包的 <literal>src/</literal> 目录下。 "
#. Tag: para
#, no-c-format
-msgid ""
-"We will omit the DTD declaration in future examples to shorten the code. It "
-"is, of course, not optional."
-msgstr ""
-"为缩短代码长度,在以后的例子里我们会省略 DTD 的声明。当然,在实际的应用程序"
-"中,DTD 声明是必需的。 "
+msgid "We will omit the DTD declaration in future examples to shorten the code. It is, of course, not optional."
+msgstr "为缩短代码长度,在以后的例子里我们会省略 DTD 的声明。当然,在实际的应用程序中,DTD 声明是必需的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Between the two <literal>hibernate-mapping</literal> tags, include a "
-"<literal>class</literal> element. All persistent entity classes (again, "
-"there might be dependent classes later on, which are not first-class "
-"entities) need a mapping to a table in the SQL database:"
-msgstr ""
-"在 <literal>hibernate-mapping</literal> 标签(tag)之间, 含有一个 "
-"<literal>class</literal> 元素。所有的持久化实体类(再次声明,或许接下来会有依"
-"赖类,就是那些次要的实体)都需要一个这样的映射,来把类对象映射到 SQL 数据库里"
-"的表:"
+msgid "Between the two <literal>hibernate-mapping</literal> tags, include a <literal>class</literal> element. All persistent entity classes (again, there might be dependent classes later on, which are not first-class entities) need a mapping to a table in the SQL database:"
+msgstr "在 <literal>hibernate-mapping</literal> 标签(tag)之间, 含有一个 <literal>class</literal> 元素。所有的持久化实体类(再次声明,或许接下来会有依赖类,就是那些次要的实体)都需要一个这样的映射,来把类对象映射到 SQL 数据库里的表:"
#. Tag: para
#, no-c-format
-msgid ""
-"So far we have told Hibernate how to persist and load object of class "
-"<literal>Event</literal> to the table <literal>EVENTS</literal>. Each "
-"instance is now represented by a row in that table. Now we can continue by "
-"mapping the unique identifier property to the tables primary key. As we do "
-"not want to care about handling this identifier, we configure Hibernate's "
-"identifier generation strategy for a surrogate primary key column:"
-msgstr ""
-"到目前为止,我们告诉了 Hibernate 怎样把 <literal>Events</literal> 类的对象持"
-"久化到数据库的 <literal>EVENTS</literal> 表里,以及怎样从 <literal>EVENTS</"
-"literal> 表加载到 <literal>Events</literal> 类的对象。每个实例对应着数据库表"
-"中的一行。现在我们将继续讨论有关唯一标识符属性到数据库表的映射。另外,由于我"
-"们不关心怎样处理这个标识符,我们就配置由 Hibernate 的标识符生成策略来产生代理"
-"主键字段:"
+msgid "So far we have told Hibernate how to persist and load object of class <literal>Event</literal> to the table <literal>EVENTS</literal>. Each instance is now represented by a row in that table. Now we can continue by mapping the unique identifier property to the tables primary key. As we do not want to care about handling this identifier, we configure Hibernate's identifier generation strategy for a surrogate primary key column:"
+msgstr "到目前为止,我们告诉了 Hibernate 怎样把 <literal>Events</literal> 类的对象持久化到数据库的 <literal>EVENTS</literal> 表里,以及怎样从 <literal>EVENTS</literal> 表加载到 <literal>Events</literal> 类的对象。每个实例对应着数据库表中的一行。现在我们将继续讨论有关唯一标识符属性到数据库表的映射。另外,由于我们不关心怎样处理这个标识符,我们就配置由 Hibernate 的标识符生成策略来产生代理主键字段:"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>id</literal> element is the declaration of the identifier "
-"property. The <literal>name=\"id\"</literal> mapping attribute declares the "
-"name of the JavaBean property and tells Hibernate to use the <literal>getId()"
-"</literal> and <literal>setId()</literal> methods to access the property. "
-"The column attribute tells Hibernate which column of the <literal>EVENTS</"
-"literal> table holds the primary key value."
-msgstr ""
-"<literal>id</literal> 元素是对 identifier 属性的声明。<literal>name=\"id\"</"
-"literal> 映射属性声明了 JavaBean 属性的名称并告诉 Hibernate 使用 "
-"<literal>getId()</literal> 和 <literal>setId()</literal> 方法来访问这个属性。"
-"column 属性告诉 Hibernate <literal>EVENTS</literal> 表的哪个字段持有主键值。"
+msgid "The <literal>id</literal> element is the declaration of the identifier property. The <literal>name=\"id\"</literal> mapping attribute declares the name of the JavaBean property and tells Hibernate to use the <literal>getId()</literal> and <literal>setId()</literal> methods to access the property. The column attribute tells Hibernate which column of the <literal>EVENTS</literal> table holds the primary key value."
+msgstr "<literal>id</literal> 元素是对 identifier 属性的声明。<literal>name=\"id\"</literal> 映射属性声明了 JavaBean 属性的名称并告诉 Hibernate 使用 <literal>getId()</literal> 和 <literal>setId()</literal> 方法来访问这个属性。column 属性告诉 Hibernate <literal>EVENTS</literal> 表的哪个字段持有主键值。"
#. Tag: para
#, no-c-format
-msgid ""
-"The nested <literal>generator</literal> element specifies the identifier "
-"generation strategy (aka how are identifier values generated?). In this case "
-"we choose <literal>native</literal>, which offers a level of portability "
-"depending on the configured database dialect. Hibernate supports database "
-"generated, globally unique, as well as application assigned, identifiers. "
-"Identifier value generation is also one of Hibernate's many extension points "
-"and you can plugin in your own strategy."
-msgstr ""
-"嵌套的 <literal>generator</literal> 元素指定标识符的生成策略(也就是标识符值"
-"是怎么产生的)。在这个例子里,我们选择 <literal>native</literal>,它提供了取"
-"决于数据库方言的可移植性。Hibernate 数据库生成的、全局性唯一的以及应用程序分"
-"配的标识符。标识符值的生成也是 Hibernate 的扩展功能之一,你可以插入自己的策"
-"略。"
+msgid "The nested <literal>generator</literal> element specifies the identifier generation strategy (aka how are identifier values generated?). In this case we choose <literal>native</literal>, which offers a level of portability depending on the configured database dialect. Hibernate supports database generated, globally unique, as well as application assigned, identifiers. Identifier value generation is also one of Hibernate's many extension points and you can plugin in your own strategy."
+msgstr "嵌套的 <literal>generator</literal> 元素指定标识符的生成策略(也就是标识符值是怎么产生的)。在这个例子里,我们选择 <literal>native</literal>,它提供了取决于数据库方言的可移植性。Hibernate 数据库生成的、全局性唯一的以及应用程序分配的标识符。标识符值的生成也是 Hibernate 的扩展功能之一,你可以插入自己的策略。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"<literal>native</literal> is no longer consider the best strategy in terms "
-"of portability. for further discussion, see <xref linkend=\"portability-idgen"
-"\" />"
-msgstr ""
-"从移植性来说,<literal>native</literal> 不再被认为是最好的策略。进一步的讨"
-"论,请参考 <xref linkend=\"portability-idgen\" />。"
+#, no-c-format
+msgid "<literal>native</literal> is no longer consider the best strategy in terms of portability. for further discussion, see <xref linkend=\"portability-idgen\" />"
+msgstr "从移植性来说,<literal>native</literal> 不再被认为是最好的策略。进一步的讨论,请参考 <xref linkend=\"portability-idgen\" />。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Lastly, we need to tell Hibernate about the remaining entity class "
-"properties. By default, no properties of the class are considered persistent:"
-msgstr ""
-"最后我们在映射文件里面包含需要持久化属性的声明。默认情况下,类里面的属性都被"
-"视为非持久化的: "
+msgid "Lastly, we need to tell Hibernate about the remaining entity class properties. By default, no properties of the class are considered persistent:"
+msgstr "最后我们在映射文件里面包含需要持久化属性的声明。默认情况下,类里面的属性都被视为非持久化的: "
#. Tag: para
#, no-c-format
-msgid ""
-"Similar to the <literal>id</literal> element, the <literal>name</literal> "
-"attribute of the <literal>property</literal> element tells Hibernate which "
-"getter and setter methods to use. In this case, Hibernate will search for "
-"<literal>getDate()</literal>, <literal>setDate()</literal>, <literal>getTitle"
-"()</literal> and <literal>setTitle()</literal> methods."
-msgstr ""
-"和 <literal>id</literal> 元素一样,<literal>property</literal> 元素的 "
-"<literal>name</literal> 属性告诉 Hibernate 使用哪个 getter 和 setter 方法。在"
-"此例中,Hibernate 会寻找 <literal>getDate()</literal>、<literal>setDate()</"
-"literal>、<literal>getTitle()</literal> 和 <literal>setTitle()</literal> 方"
-"法。 "
+msgid "Similar to the <literal>id</literal> element, the <literal>name</literal> attribute of the <literal>property</literal> element tells Hibernate which getter and setter methods to use. In this case, Hibernate will search for <literal>getDate()</literal>, <literal>setDate()</literal>, <literal>getTitle()</literal> and <literal>setTitle()</literal> methods."
+msgstr "和 <literal>id</literal> 元素一样,<literal>property</literal> 元素的 <literal>name</literal> 属性告诉 Hibernate 使用哪个 getter 和 setter 方法。在此例中,Hibernate 会寻找 <literal>getDate()</literal>、<literal>setDate()</literal>、<literal>getTitle()</literal> 和 <literal>setTitle()</literal> 方法。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Why does the <literal>date</literal> property mapping include the "
-"<literal>column</literal> attribute, but the <literal>title</literal> does "
-"not? Without the <literal>column</literal> attribute, Hibernate by default "
-"uses the property name as the column name. This works for <literal>title</"
-"literal>, however, <literal>date</literal> is a reserved keyword in most "
-"databases so you will need to map it to a different name."
-msgstr ""
-"为什么 <literal>date</literal> 属性的映射含有 <literal>column</literal> "
-"attribute,而 <literal>title</literal> 却没有?当没有设定 <literal>column</"
-"literal> attribute 的时候,Hibernate 缺省地使用 JavaBean 的属性名作为字段名。"
-"对于 <literal>title</literal>,这样工作得很好。然而,<literal>date</literal> "
-"在多数的数据库里,是一个保留关键字,所以我们最好把它映射成一个不同的名字。 "
+msgid "Why does the <literal>date</literal> property mapping include the <literal>column</literal> attribute, but the <literal>title</literal> does not? Without the <literal>column</literal> attribute, Hibernate by default uses the property name as the column name. This works for <literal>title</literal>, however, <literal>date</literal> is a reserved keyword in most databases so you will need to map it to a different name."
+msgstr "为什么 <literal>date</literal> 属性的映射含有 <literal>column</literal> attribute,而 <literal>title</literal> 却没有?当没有设定 <literal>column</literal> attribute 的时候,Hibernate 缺省地使用 JavaBean 的属性名作为字段名。对于 <literal>title</literal>,这样工作得很好。然而,<literal>date</literal> 在多数的数据库里,是一个保留关键字,所以我们最好把它映射成一个不同的名字。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>title</literal> mapping also lacks a <literal>type</literal> "
-"attribute. The types declared and used in the mapping files are not Java "
-"data types; they are not SQL database types either. These types are called "
-"<emphasis>Hibernate mapping types</emphasis>, converters which can translate "
-"from Java to SQL data types and vice versa. Again, Hibernate will try to "
-"determine the correct conversion and mapping type itself if the "
-"<literal>type</literal> attribute is not present in the mapping. In some "
-"cases this automatic detection using Reflection on the Java class might not "
-"have the default you expect or need. This is the case with the "
-"<literal>date</literal> property. Hibernate cannot know if the property, "
-"which is of <literal>java.util.Date</literal>, should map to a SQL "
-"<literal>date</literal>, <literal>timestamp</literal>, or <literal>time</"
-"literal> column. Full date and time information is preserved by mapping the "
-"property with a <literal>timestamp</literal> converter."
-msgstr ""
-"另一有趣的事情是 <literal>title</literal> 属性缺少一个 <literal>type</"
-"literal> attribute。我们在映射文件里声明并使用的类型,却不是我们期望的那样,"
-"是 Java 数据类型,同时也不是 SQL 数据库的数据类型。这些类型就是所谓的 "
-"Hibernate 映射类型<emphasis>(mapping types)</emphasis>,它们能把 Java 数据"
-"类型转换到 SQL 数据类型,反之亦然。再次重申,如果在映射文件中没有设置 "
-"<literal>type</literal> 属性的话,Hibernate 会自己试着去确定正确的转换类型和"
-"它的映射类型。在某些情况下这个自动检测机制(在 Java 类上使用反射机制)不会产"
-"生你所期待或需要的缺省值。<literal>date</literal> 属性就是个很好的例子,"
-"Hibernate 无法知道这个属性(<literal>java.util.Date</literal> 类型的)应该被"
-"映射成:SQL <literal>date</literal>,或 <literal>timestamp</literal>,还是 "
-"<literal>time</literal> 字段。在此例中,把这个属性映射成 <literal>timestamp</"
-"literal> 转换器,这样我们预留了日期和时间的全部信息。 "
+msgid "The <literal>title</literal> mapping also lacks a <literal>type</literal> attribute. The types declared and used in the mapping files are not Java data types; they are not SQL database types either. These types are called <emphasis>Hibernate mapping types</emphasis>, converters which can translate from Java to SQL data types and vice versa. Again, Hibernate will try to determine the correct conversion and mapping type itself if the <literal>type</literal> attribute is not present in the mapping. In some cases this automatic detection using Reflection on the Java class might not have the default you expect or need. This is the case with the <literal>date</literal> property. Hibernate cannot know if the property, which is of <literal>java.util.Date</literal>, should map to a SQL <literal>date</literal>, <literal>timestamp</literal>, or <literal>time</literal> column. Full date and time information is preserved by mapping the property with a <literal>timestamp</literal>!
converter."
+msgstr "另一有趣的事情是 <literal>title</literal> 属性缺少一个 <literal>type</literal> attribute。我们在映射文件里声明并使用的类型,却不是我们期望的那样,是 Java 数据类型,同时也不是 SQL 数据库的数据类型。这些类型就是所谓的 Hibernate 映射类型<emphasis>(mapping types)</emphasis>,它们能把 Java 数据类型转换到 SQL 数据类型,反之亦然。再次重申,如果在映射文件中没有设置 <literal>type</literal> 属性的话,Hibernate 会自己试着去确定正确的转换类型和它的映射类型。在某些情况下这个自动检测机制(在 Java 类上使用反射机制)不会产生你所期待或需要的缺省值。<literal>date</literal> 属性就是个很好的例子,Hibernate 无法知道这个属性(<literal>java.util.Date</literal> 类型的)应该被映射成:SQL <literal>date</literal>,或 <literal>timestamp</literal>,还是 <literal>time</litera!
l> 字段。在此例中,把这个属性映射成 <literal>timestamp</literal> 转换器,这样我们预留了日期和时间的全部信息。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate makes this mapping type determination using reflection when the "
-"mapping files are processed. This can take time and resources, so if startup "
-"performance is important you should consider explicitly defining the type to "
-"use."
-msgstr ""
-"当处理映射文件时,Hibernate 用反射(reflection)来决定这个映射类型。这需要时"
-"间和资源,所以如果你注重启动性能,你应该考虑显性地定义所用的类型。"
+msgid "Hibernate makes this mapping type determination using reflection when the mapping files are processed. This can take time and resources, so if startup performance is important you should consider explicitly defining the type to use."
+msgstr "当处理映射文件时,Hibernate 用反射(reflection)来决定这个映射类型。这需要时间和资源,所以如果你注重启动性能,你应该考虑显性地定义所用的类型。"
#. Tag: para
#, no-c-format
-msgid ""
-"Save this mapping file as <filename>src/main/resources/org/hibernate/"
-"tutorial/domain/Event.hbm.xml</filename>."
-msgstr ""
-"把这个映射文件保存为 <filename>src/main/resources/org/hibernate/tutorial/"
-"domain/Event.hbm.xml</filename>。"
+msgid "Save this mapping file as <filename>src/main/resources/org/hibernate/tutorial/domain/Event.hbm.xml</filename>."
+msgstr "把这个映射文件保存为 <filename>src/main/resources/org/hibernate/tutorial/domain/Event.hbm.xml</filename>。"
#. Tag: title
#, no-c-format
@@ -443,76 +192,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"At this point, you should have the persistent class and its mapping file in "
-"place. It is now time to configure Hibernate. First let's set up HSQLDB to "
-"run in \"server mode\""
-msgstr ""
-"此时,你应该有了持久化类和它的映射文件。现在是配置 Hibernate 的时候了。首先让"
-"我们设立 HSQLDB 使其运行在“服务器模式”。"
+msgid "At this point, you should have the persistent class and its mapping file in place. It is now time to configure Hibernate. First let's set up HSQLDB to run in \"server mode\""
+msgstr "此时,你应该有了持久化类和它的映射文件。现在是配置 Hibernate 的时候了。首先让我们设立 HSQLDB 使其运行在“服务器模式”。"
#. Tag: para
-#, fuzzy, no-c-format
+#, no-c-format
msgid "We do this do that the data remains between runs."
-msgstr "数据在程序运行期间需要保持有效。"
+msgstr "数据在程序运行期间需要保持有效。 "
#. Tag: para
#, no-c-format
-msgid ""
-"We will utilize the Maven exec plugin to launch the HSQLDB server by "
-"running: <command> mvn exec:java -Dexec.mainClass=\"org.hsqldb.Server\" -"
-"Dexec.args=\"-database.0 file:target/data/tutorial\"</command> You will see "
-"it start up and bind to a TCP/IP socket; this is where our application will "
-"connect later. If you want to start with a fresh database during this "
-"tutorial, shutdown HSQLDB, delete all files in the <filename>target/data</"
-"filename> directory, and start HSQLDB again."
-msgstr ""
-"在开发的根目录下创建一个 <literal>data</literal> 目录 - 这是 HSQL DB 存储数"
-"据文件的地方。此时在 data 目录中运行 <literal>java -classpath ../lib/hsqldb."
-"jar org.hsqldb.Server</literal> 就可启动数据库。你可以在 log 中看到它的启动,"
-"及绑定到 TCP/IP 套接字,这正是我们的应用程序稍后会连接的地方。如果你希望在本"
-"例中运行一个全新的数据库,就在窗口中按下 <literal>CTRL + C</literal> 来关闭 "
-"HSQL 数据库,并删除 <literal>data/</literal> 目录下的所有文件,再重新启动 "
-"HSQL 数据库。 "
+msgid "We will utilize the Maven exec plugin to launch the HSQLDB server by running: <command> mvn exec:java -Dexec.mainClass=\"org.hsqldb.Server\" -Dexec.args=\"-database.0 file:target/data/tutorial\"</command> You will see it start up and bind to a TCP/IP socket; this is where our application will connect later. If you want to start with a fresh database during this tutorial, shutdown HSQLDB, delete all files in the <filename>target/data</filename> directory, and start HSQLDB again."
+msgstr "在开发的根目录下创建一个 <literal>data</literal> 目录 - 这是 HSQL DB 存储数据文件的地方。此时在 data 目录中运行 <literal>java -classpath ../lib/hsqldb.jar org.hsqldb.Server</literal> 就可启动数据库。你可以在 log 中看到它的启动,及绑定到 TCP/IP 套接字,这正是我们的应用程序稍后会连接的地方。如果你希望在本例中运行一个全新的数据库,就在窗口中按下 <literal>CTRL + C</literal> 来关闭 HSQL 数据库,并删除 <literal>data/</literal> 目录下的所有文件,再重新启动 HSQL 数据库。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate will be connecting to the database on behalf of your application, "
-"so it needs to know how to obtain connections. For this tutorial we will be "
-"using a standalone connection pool (as opposed to a <interfacename>javax.sql."
-"DataSource</interfacename>). Hibernate comes with support for two third-"
-"party open source JDBC connection pools: <ulink url=\"https://sourceforge."
-"net/projects/c3p0\">c3p0</ulink> and <ulink url=\"http://proxool.sourceforge."
-"net/\">proxool</ulink>. However, we will be using the Hibernate built-in "
-"connection pool for this tutorial."
-msgstr ""
-"Hibernate 将为你的应用程序连接到数据库,所以它需要知道如何获取连接。在这个教"
-"程里,我们使用一个独立连接池(和 <interfacename>javax.sql.DataSource</"
-"interfacename> 相反)。Hibernate 支持两个第三方的开源 JDBC 连接池:<ulink "
-"url=\"https://sourceforge.net/projects/c3p0\">c3p0</ulink> 和 <ulink url="
-"\"http://proxool.sourceforge.net/\">proxool</ulink>。然而,在本教程里我们将使"
-"用 Hibernate 内置的连接池。"
+msgid "Hibernate will be connecting to the database on behalf of your application, so it needs to know how to obtain connections. For this tutorial we will be using a standalone connection pool (as opposed to a <interfacename>javax.sql.DataSource</interfacename>). Hibernate comes with support for two third-party open source JDBC connection pools: <ulink url=\"https://sourceforge.net/projects/c3p0\">c3p0</ulink> and <ulink url=\"http://proxool.sourceforge.net/\">proxool</ulink>. However, we will be using the Hibernate built-in connection pool for this tutorial."
+msgstr "Hibernate 将为你的应用程序连接到数据库,所以它需要知道如何获取连接。在这个教程里,我们使用一个独立连接池(和 <interfacename>javax.sql.DataSource</interfacename> 相反)。Hibernate 支持两个第三方的开源 JDBC 连接池:<ulink url=\"https://sourceforge.net/projects/c3p0\">c3p0</ulink> 和 <ulink url=\"http://proxool.sourceforge.net/\">proxool</ulink>。然而,在本教程里我们将使用 Hibernate 内置的连接池。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"The built-in Hibernate connection pool is in no way intended for production "
-"use. It lacks several features found on any decent connection pool."
-msgstr "嵌入的 Hibernate 连接池不用于产品环境。"
+#, no-c-format
+msgid "The built-in Hibernate connection pool is in no way intended for production use. It lacks several features found on any decent connection pool."
+msgstr "嵌入的 Hibernate 连接池不用于产品环境。它缺乏连接池里的几个功能。"
#. Tag: para
#, no-c-format
-msgid ""
-"For Hibernate's configuration, we can use a simple <literal>hibernate."
-"properties</literal> file, a more sophisticated <literal>hibernate.cfg.xml</"
-"literal> file, or even complete programmatic setup. Most users prefer the "
-"XML configuration file:"
-msgstr ""
-"为了保存 Hibernate 的配置,我们可以使用一个简单的 <literal>hibernate."
-"properties</literal> 文件,或者一个稍微复杂的 <literal>hibernate.cfg.xml</"
-"literal>,甚至可以完全使用程序来配置 Hibernate。多数用户更喜欢使用 XML 配置文"
-"件: "
+msgid "For Hibernate's configuration, we can use a simple <literal>hibernate.properties</literal> file, a more sophisticated <literal>hibernate.cfg.xml</literal> file, or even complete programmatic setup. Most users prefer the XML configuration file:"
+msgstr "为了保存 Hibernate 的配置,我们可以使用一个简单的 <literal>hibernate.properties</literal> 文件,或者一个稍微复杂的 <literal>hibernate.cfg.xml</literal>,甚至可以完全使用程序来配置 Hibernate。多数用户更喜欢使用 XML 配置文件: "
#. Tag: para
#, no-c-format
@@ -521,63 +227,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You configure Hibernate's <literal>SessionFactory</literal>. SessionFactory "
-"is a global factory responsible for a particular database. If you have "
-"several databases, for easier startup you should use several <literal><"
-"session-factory></literal> configurations in several configuration files."
-msgstr ""
-"注意这个 XML 配置使用了一个不同的 DTD。在这里,我们配置了 Hibernate 的"
-"<literal>SessionFactory</literal> — 一个关联于特定数据库全局的工厂"
-"(factory)。如果你要使用多个数据库,就要用多个的 <literal><session-"
-"factory></literal>,通常把它们放在多个配置文件中(为了更容易启动)。 "
+msgid "You configure Hibernate's <literal>SessionFactory</literal>. SessionFactory is a global factory responsible for a particular database. If you have several databases, for easier startup you should use several <literal><session-factory></literal> configurations in several configuration files."
+msgstr "注意这个 XML 配置使用了一个不同的 DTD。在这里,我们配置了 Hibernate 的<literal>SessionFactory</literal> — 一个关联于特定数据库全局的工厂(factory)。如果你要使用多个数据库,就要用多个的 <literal><session-factory></literal>,通常把它们放在多个配置文件中(为了更容易启动)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The first four <literal>property</literal> elements contain the necessary "
-"configuration for the JDBC connection. The dialect <literal>property</"
-"literal> element specifies the particular SQL variant Hibernate generates."
-msgstr ""
-"签名 4 个 <literal>property</literal> 元素包含了 JDBC 连接所必需的配置。方言 "
-"<literal>property</literal> 元素指定了 Hibernate 生成的特定 SQL 语句。"
+msgid "The first four <literal>property</literal> elements contain the necessary configuration for the JDBC connection. The dialect <literal>property</literal> element specifies the particular SQL variant Hibernate generates."
+msgstr "签名 4 个 <literal>property</literal> 元素包含了 JDBC 连接所必需的配置。方言 <literal>property</literal> 元素指定了 Hibernate 生成的特定 SQL 语句。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"In most cases, Hibernate is able to properly determine which dialect to use. "
-"See <xref linkend=\"portability-dialectresolver\" /> for more information."
-msgstr ""
-"在大多数情况下,Hibernate 都能够正确地决定所使用的方言。更多信息请参考 <xref "
-"linkend=\"portability-dialectresolver\" />。"
+#, no-c-format
+msgid "In most cases, Hibernate is able to properly determine which dialect to use. See <xref linkend=\"portability-dialectresolver\" /> for more information."
+msgstr "在大多数情况下,Hibernate 都能够正确地决定所使用的方言。更多信息请参考 <xref linkend=\"portability-dialectresolver\" />。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate's automatic session management for persistence contexts is "
-"particularly useful in this context. The <literal>hbm2ddl.auto</literal> "
-"option turns on automatic generation of database schemas directly into the "
-"database. This can also be turned off by removing the configuration option, "
-"or redirected to a file with the help of the <literal>SchemaExport</literal> "
-"Ant task. Finally, add the mapping file(s) for persistent classes to the "
-"configuration."
-msgstr ""
-"最开始的 4 个 <literal>property</literal> 元素包含必要的 JDBC 连接信息。方言"
-"(dialect)的 <literal>property</literal> 元素指明 Hibernate 生成的特定 SQL "
-"变量。你很快会看到,Hibernate 对持久化上下文的自动 session 管理就会派上用"
-"场。 打开 <literal>hbm2ddl.auto</literal> 选项将自动生成数据库模式(schema)"
-"- 直接加入数据库中。当然这个选项也可以被关闭(通过去除这个配置选项)或者通"
-"过 Ant 任务 <literal>SchemaExport</literal> 的帮助来把数据库 schema 重定向到"
-"文件中。最后,在配置中为持久化类加入映射文件。 "
+msgid "Hibernate's automatic session management for persistence contexts is particularly useful in this context. The <literal>hbm2ddl.auto</literal> option turns on automatic generation of database schemas directly into the database. This can also be turned off by removing the configuration option, or redirected to a file with the help of the <literal>SchemaExport</literal> Ant task. Finally, add the mapping file(s) for persistent classes to the configuration."
+msgstr "最开始的 4 个 <literal>property</literal> 元素包含必要的 JDBC 连接信息。方言(dialect)的 <literal>property</literal> 元素指明 Hibernate 生成的特定 SQL 变量。你很快会看到,Hibernate 对持久化上下文的自动 session 管理就会派上用场。 打开 <literal>hbm2ddl.auto</literal> 选项将自动生成数据库模式(schema)- 直接加入数据库中。当然这个选项也可以被关闭(通过去除这个配置选项)或者通过 Ant 任务 <literal>SchemaExport</literal> 的帮助来把数据库 schema 重定向到文件中。最后,在配置中为持久化类加入映射文件。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Save this file as <filename>hibernate.cfg.xml</filename> into the "
-"<filename>src/main/resources</filename> directory."
-msgstr ""
-"把这个文件保存为 <filename>src/main/resources</filename> 目录下的 "
-"<filename>hibernate.cfg.xml</filename>。"
+msgid "Save this file as <filename>hibernate.cfg.xml</filename> into the <filename>src/main/resources</filename> directory."
+msgstr "把这个文件保存为 <filename>src/main/resources</filename> 目录下的 <filename>hibernate.cfg.xml</filename>。"
#. Tag: title
#, no-c-format
@@ -586,19 +257,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"We will now build the tutorial with Maven. You will need to have Maven "
-"installed; it is available from the <ulink url=\"http://maven.apache.org/"
-"download.html\">Maven download page</ulink>. Maven will read the <filename>/"
-"pom.xml</filename> file we created earlier and know how to perform some "
-"basic project tasks. First, lets run the <literal>compile</literal> goal to "
-"make sure we can compile everything so far:"
-msgstr ""
-"我们将用 Maven 构建这个教程。你将需要安装 Maven;你可以从 <ulink url="
-"\"http://maven.apache.org/download.html\">Maven 下载页面</ulink>获得 Maven。"
-"Maen 将读取我们先前创建的 <filename>/pom.xml</filename> 并知道执行基本的项目"
-"任务。首先,让我们运行 <literal>compile</literal> 目标来确保我们可以编译到目"
-"前为止的所有程序:"
+msgid "We will now build the tutorial with Maven. You will need to have Maven installed; it is available from the <ulink url=\"http://maven.apache.org/download.html\">Maven download page</ulink>. Maven will read the <filename>/pom.xml</filename> file we created earlier and know how to perform some basic project tasks. First, lets run the <literal>compile</literal> goal to make sure we can compile everything so far:"
+msgstr "我们将用 Maven 构建这个教程。你将需要安装 Maven;你可以从 <ulink url=\"http://maven.apache.org/download.html\">Maven 下载页面</ulink>获得 Maven。Maen 将读取我们先前创建的 <filename>/pom.xml</filename> 并知道执行基本的项目任务。首先,让我们运行 <literal>compile</literal> 目标来确保我们可以编译到目前为止的所有程序:"
#. Tag: title
#, no-c-format
@@ -607,102 +267,37 @@
#. Tag: para
#, no-c-format
-msgid ""
-"It is time to load and store some <literal>Event</literal> objects, but "
-"first you have to complete the setup with some infrastructure code. You have "
-"to startup Hibernate by building a global <interfacename>org.hibernate."
-"SessionFactory</interfacename> object and storing it somewhere for easy "
-"access in application code. A <interfacename>org.hibernate.SessionFactory</"
-"interfacename> is used to obtain <interfacename>org.hibernate.Session</"
-"interfacename> instances. A <interfacename>org.hibernate.Session</"
-"interfacename> represents a single-threaded unit of work. The "
-"<interfacename>org.hibernate.SessionFactory</interfacename> is a thread-safe "
-"global object that is instantiated once."
-msgstr ""
-"是时候来加载和储存一些 <literal>Event</literal> 对象了,但首先我们得编写一些"
-"基础的代码以完成设置。我们必须启动 Hibernate,此过程包括创建一个全局的 "
-"<literal>SessoinFactory</literal>,并把它储存在应用程序代码容易访问的地方。"
-"<literal>SessionFactory</literal> 可以创建并打开新的 <literal>Session</"
-"literal>。一个 <literal>Session</literal> 代表一个单线程的单元操作,"
-"<interfacename>org.hibernate.SessionFactory</interfacename> 则是个线程安全的"
-"全局对象,只需要被实例化一次。 "
+msgid "It is time to load and store some <literal>Event</literal> objects, but first you have to complete the setup with some infrastructure code. You have to startup Hibernate by building a global <interfacename>org.hibernate.SessionFactory</interfacename> object and storing it somewhere for easy access in application code. A <interfacename>org.hibernate.SessionFactory</interfacename> is used to obtain <interfacename>org.hibernate.Session</interfacename> instances. A <interfacename>org.hibernate.Session</interfacename> represents a single-threaded unit of work. The <interfacename>org.hibernate.SessionFactory</interfacename> is a thread-safe global object that is instantiated once."
+msgstr "是时候来加载和储存一些 <literal>Event</literal> 对象了,但首先我们得编写一些基础的代码以完成设置。我们必须启动 Hibernate,此过程包括创建一个全局的 <literal>SessoinFactory</literal>,并把它储存在应用程序代码容易访问的地方。<literal>SessionFactory</literal> 可以创建并打开新的 <literal>Session</literal>。一个 <literal>Session</literal> 代表一个单线程的单元操作,<interfacename>org.hibernate.SessionFactory</interfacename> 则是个线程安全的全局对象,只需要被实例化一次。 "
#. Tag: para
#, no-c-format
-msgid ""
-"We will create a <literal>HibernateUtil</literal> helper class that takes "
-"care of startup and makes accessing the <interfacename>org.hibernate."
-"SessionFactory</interfacename> more convenient."
-msgstr ""
-"我们将创建一个 <literal>HibernateUtil</literal> 辅助类(helper class)来负责"
-"启动 Hibernate 和更方便地操作 <interfacename>org.hibernate.SessionFactory</"
-"interfacename>。让我们来看一下它的实现:"
+msgid "We will create a <literal>HibernateUtil</literal> helper class that takes care of startup and makes accessing the <interfacename>org.hibernate.SessionFactory</interfacename> more convenient."
+msgstr "我们将创建一个 <literal>HibernateUtil</literal> 辅助类(helper class)来负责启动 Hibernate 和更方便地操作 <interfacename>org.hibernate.SessionFactory</interfacename>。让我们来看一下它的实现:"
#. Tag: para
#, no-c-format
-msgid ""
-"Save this code as <filename>src/main/java/org/hibernate/tutorial/util/"
-"HibernateUtil.java</filename>"
-msgstr ""
-"把这段代码保存为 <filename>src/main/java/org/hibernate/tutorial/util/"
-"HibernateUtil.java</filename>。"
+msgid "Save this code as <filename>src/main/java/org/hibernate/tutorial/util/HibernateUtil.java</filename>"
+msgstr "把这段代码保存为 <filename>src/main/java/org/hibernate/tutorial/util/HibernateUtil.java</filename>。"
#. Tag: para
#, no-c-format
-msgid ""
-"This class not only produces the global <interfacename>org.hibernate."
-"SessionFactory</interfacename> reference in its static initializer; it also "
-"hides the fact that it uses a static singleton. We might just as well have "
-"looked up the <interfacename>org.hibernate.SessionFactory</interfacename> "
-"reference from JNDI in an application server or any other location for that "
-"matter."
-msgstr ""
-"这个类不但在它的静态初始化过程(仅当加载这个类的时候被 JVM 执行一次)中产生全"
-"局的 <interfacename>org.hibernate.SessionFactory</interfacename>,而且隐藏了"
-"它使用了静态 singleton 的事实。它也可能在应用程序服务器中的 JNDI 查找 "
-"<interfacename>org.hibernate.SessionFactory</interfacename>。"
+msgid "This class not only produces the global <interfacename>org.hibernate.SessionFactory</interfacename> reference in its static initializer; it also hides the fact that it uses a static singleton. We might just as well have looked up the <interfacename>org.hibernate.SessionFactory</interfacename> reference from JNDI in an application server or any other location for that matter."
+msgstr "这个类不但在它的静态初始化过程(仅当加载这个类的时候被 JVM 执行一次)中产生全局的 <interfacename>org.hibernate.SessionFactory</interfacename>,而且隐藏了它使用了静态 singleton 的事实。它也可能在应用程序服务器中的 JNDI 查找 <interfacename>org.hibernate.SessionFactory</interfacename>。"
#. Tag: para
#, no-c-format
-msgid ""
-"If you give the <interfacename>org.hibernate.SessionFactory</interfacename> "
-"a name in your configuration, Hibernate will try to bind it to JNDI under "
-"that name after it has been built. Another, better option is to use a JMX "
-"deployment and let the JMX-capable container instantiate and bind a "
-"<literal>HibernateService</literal> to JNDI. Such advanced options are "
-"discussed later."
-msgstr ""
-"如果你在配置文件中给 <interfacename>org.hibernate.SessionFactory</"
-"interfacename> 一个名字,在 它创建后,Hibernate 会试着把它绑定到 JNDI。要完全"
-"避免这样的代码,你也可以使用 JMX 部署,让具有 JMX 能力的容器来实例化 "
-"<literal>HibernateService</literal> 并把它绑定到 JNDI。这些高级可选项在后面的"
-"章节中会讨论到。"
+msgid "If you give the <interfacename>org.hibernate.SessionFactory</interfacename> a name in your configuration, Hibernate will try to bind it to JNDI under that name after it has been built. Another, better option is to use a JMX deployment and let the JMX-capable container instantiate and bind a <literal>HibernateService</literal> to JNDI. Such advanced options are discussed later."
+msgstr "如果你在配置文件中给 <interfacename>org.hibernate.SessionFactory</interfacename> 一个名字,在 它创建后,Hibernate 会试着把它绑定到 JNDI。要完全避免这样的代码,你也可以使用 JMX 部署,让具有 JMX 能力的容器来实例化 <literal>HibernateService</literal> 并把它绑定到 JNDI。这些高级可选项在后面的章节中会讨论到。"
#. Tag: para
#, no-c-format
-msgid ""
-"You now need to configure a logging system. Hibernate uses commons logging "
-"and provides two choices: Log4j and JDK 1.4 logging. Most developers prefer "
-"Log4j: copy <literal>log4j.properties</literal> from the Hibernate "
-"distribution in the <literal>etc/</literal> directory to your <literal>src</"
-"literal> directory, next to <literal>hibernate.cfg.xml</literal>. If you "
-"prefer to have more verbose output than that provided in the example "
-"configuration, you can change the settings. By default, only the Hibernate "
-"startup message is shown on stdout."
-msgstr ""
-"再次编译这个应用程序应该不会有问题。最后我们需要配置一个日志(logging)系统 — "
-"Hibernate 使用通用日志接口,允许你在 Log4j 和 JDK 1.4 日志之间进行选择。多数"
-"开发者更喜欢 Log4j:从 Hibernate 的发布包中(它在 <literal>etc/</literal> 目"
-"录下)拷贝 <literal>log4j.properties</literal> 到你的 <literal>src</literal> "
-"目录,与 <literal>hibernate.cfg.xml</literal> 放在一起。看一下配置示例,如果"
-"你希望看到更加详细的输出信息,你可以修改配置。默认情况下,只有 Hibernate 的启"
-"动信息才会显示在标准输出上。 "
+msgid "You now need to configure a logging system. Hibernate uses commons logging and provides two choices: Log4j and JDK 1.4 logging. Most developers prefer Log4j: copy <literal>log4j.properties</literal> from the Hibernate distribution in the <literal>etc/</literal> directory to your <literal>src</literal> directory, next to <literal>hibernate.cfg.xml</literal>. If you prefer to have more verbose output than that provided in the example configuration, you can change the settings. By default, only the Hibernate startup message is shown on stdout."
+msgstr "再次编译这个应用程序应该不会有问题。最后我们需要配置一个日志(logging)系统 — Hibernate 使用通用日志接口,允许你在 Log4j 和 JDK 1.4 日志之间进行选择。多数开发者更喜欢 Log4j:从 Hibernate 的发布包中(它在 <literal>etc/</literal> 目录下)拷贝 <literal>log4j.properties</literal> 到你的 <literal>src</literal> 目录,与 <literal>hibernate.cfg.xml</literal> 放在一起。看一下配置示例,如果你希望看到更加详细的输出信息,你可以修改配置。默认情况下,只有 Hibernate 的启动信息才会显示在标准输出上。 "
#. Tag: para
#, no-c-format
-msgid ""
-"The tutorial infrastructure is complete and you are now ready to do some "
-"real work with Hibernate."
+msgid "The tutorial infrastructure is complete and you are now ready to do some real work with Hibernate."
msgstr "示例的基本框架完成了 — 现在我们可以用 Hibernate 来做些真正的工作。 "
#. Tag: title
@@ -712,140 +307,48 @@
#. Tag: para
#, no-c-format
-msgid ""
-"We are now ready to start doing some real worjk with Hibernate. Let's start "
-"by writing an <literal>EventManager</literal> class with a <literal>main()</"
-"literal> method:"
-msgstr ""
-"我们终于可以使用 Hibernate 来加载和存储对象了,编写一个带有 <literal>main()</"
-"literal> 方法的 <literal>EventManager</literal> 类: "
+msgid "We are now ready to start doing some real worjk with Hibernate. Let's start by writing an <literal>EventManager</literal> class with a <literal>main()</literal> method:"
+msgstr "我们终于可以使用 Hibernate 来加载和存储对象了,编写一个带有 <literal>main()</literal> 方法的 <literal>EventManager</literal> 类: "
#. Tag: para
#, no-c-format
-msgid ""
-"In <literal>createAndStoreEvent()</literal> we created a new <literal>Event</"
-"literal> object and handed it over to Hibernate. At that point, Hibernate "
-"takes care of the SQL and executes an <literal>INSERT</literal> on the "
-"database."
-msgstr ""
-"在 <literal>createAndStoreEvent()</literal> 来里我们创建了一个新的 "
-"<literal>Event</literal> 对象并把它传递给 Hibernate。现在 Hibernate 负责与 "
-"SQL 打交道,并把 <literal>INSERT</literal> 命令传给数据库。"
+msgid "In <literal>createAndStoreEvent()</literal> we created a new <literal>Event</literal> object and handed it over to Hibernate. At that point, Hibernate takes care of the SQL and executes an <literal>INSERT</literal> on the database."
+msgstr "在 <literal>createAndStoreEvent()</literal> 来里我们创建了一个新的 <literal>Event</literal> 对象并把它传递给 Hibernate。现在 Hibernate 负责与 SQL 打交道,并把 <literal>INSERT</literal> 命令传给数据库。"
#. Tag: para
#, no-c-format
-msgid ""
-"A <interface>org.hibernate.Session</interface> is designed to represent a "
-"single unit of work (a single atmoic piece of work to be performed). For now "
-"we will keep things simple and assume a one-to-one granularity between a "
-"Hibernate <interface>org.hibernate.Session</interface> and a database "
-"transaction. To shield our code from the actual underlying transaction "
-"system we use the Hibernate <interfacename>org.hibernate.Transaction</"
-"interfacename> API. In this particular case we are using JDBC-based "
-"transactional semantics, but it could also run with JTA."
-msgstr ""
-"一个 <interface>org.hibernate.Session</interface> 就是个单一的工作单元。我们"
-"暂时让事情简单一些,并假设 Hibernate <interface>org.hibernate.Session</"
-"interface> 和数据库事务是一一对应的。为了让我们的代码从底层的事务系统中脱离出"
-"来(此例中是 JDBC,但也可能是 JTA),我们使用 <interfacename>org.hibernate."
-"Transaction</interfacename> API。在这个例子里我们使用基于 JDBC 的事务性 "
-"semantic,但它也可以和 JTA 一起运行。"
+msgid "A <interface>org.hibernate.Session</interface> is designed to represent a single unit of work (a single atmoic piece of work to be performed). For now we will keep things simple and assume a one-to-one granularity between a Hibernate <interface>org.hibernate.Session</interface> and a database transaction. To shield our code from the actual underlying transaction system we use the Hibernate <interfacename>org.hibernate.Transaction</interfacename> API. In this particular case we are using JDBC-based transactional semantics, but it could also run with JTA."
+msgstr "一个 <interface>org.hibernate.Session</interface> 就是个单一的工作单元。我们暂时让事情简单一些,并假设 Hibernate <interface>org.hibernate.Session</interface> 和数据库事务是一一对应的。为了让我们的代码从底层的事务系统中脱离出来(此例中是 JDBC,但也可能是 JTA),我们使用 <interfacename>org.hibernate.Transaction</interfacename> API。在这个例子里我们使用基于 JDBC 的事务性 semantic,但它也可以和 JTA 一起运行。"
#. Tag: para
#, no-c-format
-msgid ""
-"What does <literal>sessionFactory.getCurrentSession()</literal> do? First, "
-"you can call it as many times and anywhere you like once you get hold of "
-"your <interfacename>org.hibernate.SessionFactory</interfacename>. The "
-"<literal>getCurrentSession()</literal> method always returns the \"current\" "
-"unit of work. Remember that we switched the configuration option for this "
-"mechanism to \"thread\" in our <filename>src/main/resources/hibernate.cfg."
-"xml</filename>? Due to that setting, the context of a current unit of work "
-"is bound to the current Java thread that executes the application."
-msgstr ""
-"<literal>sessionFactory.getCurrentSession()</literal> 是干什么的呢?首先,只"
-"要你持有 <interfacename>org.hibernate.SessionFactory</interfacename>,大可在"
-"任何时候、任何地点调用这个方法。<literal>getCurrentSession()</literal> 方法总"
-"会返回“当前的”工作单元。记得我们在 <filename>src/main/resources/hibernate."
-"cfg.xml</filename> 中把这一配置选项调整为 \"thread\" 了吗?因此,因此,当前工"
-"作单元被绑定到当前执行我们应用程序的 Java 线程。但是,这并非是完全准确的,你还"
-"得考虑工作单元的生命周期范围(scope),它何时开始,又何时结束。"
+msgid "What does <literal>sessionFactory.getCurrentSession()</literal> do? First, you can call it as many times and anywhere you like once you get hold of your <interfacename>org.hibernate.SessionFactory</interfacename>. The <literal>getCurrentSession()</literal> method always returns the \"current\" unit of work. Remember that we switched the configuration option for this mechanism to \"thread\" in our <filename>src/main/resources/hibernate.cfg.xml</filename>? Due to that setting, the context of a current unit of work is bound to the current Java thread that executes the application."
+msgstr "<literal>sessionFactory.getCurrentSession()</literal> 是干什么的呢?首先,只要你持有 <interfacename>org.hibernate.SessionFactory</interfacename>,大可在任何时候、任何地点调用这个方法。<literal>getCurrentSession()</literal> 方法总会返回“当前的”工作单元。记得我们在 <filename>src/main/resources/hibernate.cfg.xml</filename> 中把这一配置选项调整为 \"thread\" 了吗?因此,因此,当前工作单元被绑定到当前执行我们应用程序的 Java 线程。但是,这并非是完全准确的,你还得考虑工作单元的生命周期范围(scope),它何时开始,又何时结束。"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate offers three methods of current session tracking. The \"thread\" "
-"based method is not intended for production use; it is merely useful for "
-"prototyping and tutorials such as this one. Current session tracking is "
-"discussed in more detail later on."
-msgstr ""
-"Hibernate 提供三种跟踪当前会话的方法。基于“线程”的方法不适合于产品环境,它仅"
-"用于 prototyping 和教学用途。后面将更详细地讨论会话跟踪。"
+msgid "Hibernate offers three methods of current session tracking. The \"thread\" based method is not intended for production use; it is merely useful for prototyping and tutorials such as this one. Current session tracking is discussed in more detail later on."
+msgstr "Hibernate 提供三种跟踪当前会话的方法。基于“线程”的方法不适合于产品环境,它仅用于 prototyping 和教学用途。后面将更详细地讨论会话跟踪。"
#. Tag: para
#, no-c-format
-msgid ""
-"A <interface>org.hibernate.Session</interface> begins when the first call to "
-"<literal>getCurrentSession()</literal> is made for the current thread. It is "
-"then bound by Hibernate to the current thread. When the transaction ends, "
-"either through commit or rollback, Hibernate automatically unbinds the "
-"<interface>org.hibernate.Session</interface> from the thread and closes it "
-"for you. If you call <literal>getCurrentSession()</literal> again, you get a "
-"new <interface>org.hibernate.Session</interface> and can start a new unit of "
-"work."
-msgstr ""
-"<interface>org.hibernate.Session</interface> 在第一次被使用的时候,即第一次调"
-"用 <literal>getCurrentSession()</literal> 的时候,其生命周期就开始。然后它被 "
-"Hibernate 绑定到当前线程。当事务结束的时候,不管是提交还是回滚,Hibernate 会"
-"自动把 <interface>org.hibernate.Session</interface> 从当前线程剥离,并且关闭"
-"它。假若你再次调用 <literal>getCurrentSession()</literal>,你会得到一个新的 "
-"<interface>org.hibernate.Session</interface>,并且开始一个新的工作单元。"
+msgid "A <interface>org.hibernate.Session</interface> begins when the first call to <literal>getCurrentSession()</literal> is made for the current thread. It is then bound by Hibernate to the current thread. When the transaction ends, either through commit or rollback, Hibernate automatically unbinds the <interface>org.hibernate.Session</interface> from the thread and closes it for you. If you call <literal>getCurrentSession()</literal> again, you get a new <interface>org.hibernate.Session</interface> and can start a new unit of work."
+msgstr "<interface>org.hibernate.Session</interface> 在第一次被使用的时候,即第一次调用 <literal>getCurrentSession()</literal> 的时候,其生命周期就开始。然后它被 Hibernate 绑定到当前线程。当事务结束的时候,不管是提交还是回滚,Hibernate 会自动把 <interface>org.hibernate.Session</interface> 从当前线程剥离,并且关闭它。假若你再次调用 <literal>getCurrentSession()</literal>,你会得到一个新的 <interface>org.hibernate.Session</interface>,并且开始一个新的工作单元。"
#. Tag: para
#, no-c-format
-msgid ""
-"Related to the unit of work scope, should the Hibernate <interface>org."
-"hibernate.Session</interface> be used to execute one or several database "
-"operations? The above example uses one <interface>org.hibernate.Session</"
-"interface> for one operation. However this is pure coincidence; the example "
-"is just not complex enough to show any other approach. The scope of a "
-"Hibernate <interface>org.hibernate.Session</interface> is flexible but you "
-"should never design your application to use a new Hibernate <interface>org."
-"hibernate.Session</interface> for <emphasis>every</emphasis> database "
-"operation. Even though it is used in the following examples, consider "
-"<emphasis>session-per-operation</emphasis> an anti-pattern. A real web "
-"application is shown later in the tutorial which will help illustrate this."
-msgstr ""
-"和工作单元的生命周期这个话题相关,Hibernate <interface>org.hibernate."
-"Session</interface> 是否被应该用来执行多次数据库操作?上面的例子对每一次操作"
-"使用了一个 <interface>org.hibernate.Session</interface>,这完全是巧合,这个例"
-"子不是很复杂,无法展示其他方式。Hibernate <interface>org.hibernate.Session</"
-"interface> 的生命周期可以很灵活,但是你绝不要把你的应用程序设计成为<emphasis>"
-"每一次</emphasis>数据库操作都用一个新的 Hibernate <interface>org.hibernate."
-"Session</interface>。因此就算下面的例子(它们都很简单)中你可以看到这种用法,"
-"记住<emphasis>每次操作一个 session</emphasis> 是一个反模式。在本教程的后面会"
-"展示一个真正的(web)程序。"
+msgid "Related to the unit of work scope, should the Hibernate <interface>org.hibernate.Session</interface> be used to execute one or several database operations? The above example uses one <interface>org.hibernate.Session</interface> for one operation. However this is pure coincidence; the example is just not complex enough to show any other approach. The scope of a Hibernate <interface>org.hibernate.Session</interface> is flexible but you should never design your application to use a new Hibernate <interface>org.hibernate.Session</interface> for <emphasis>every</emphasis> database operation. Even though it is used in the following examples, consider <emphasis>session-per-operation</emphasis> an anti-pattern. A real web application is shown later in the tutorial which will help illustrate this."
+msgstr "和工作单元的生命周期这个话题相关,Hibernate <interface>org.hibernate.Session</interface> 是否被应该用来执行多次数据库操作?上面的例子对每一次操作使用了一个 <interface>org.hibernate.Session</interface>,这完全是巧合,这个例子不是很复杂,无法展示其他方式。Hibernate <interface>org.hibernate.Session</interface> 的生命周期可以很灵活,但是你绝不要把你的应用程序设计成为<emphasis>每一次</emphasis>数据库操作都用一个新的 Hibernate <interface>org.hibernate.Session</interface>。因此就算下面的例子(它们都很简单)中你可以看到这种用法,记住<emphasis>每次操作一个 session</emphasis> 是一个反模式。在本教程的后面会展示一个真正的(web)程序。"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"See <xref linkend=\"transactions\" /> for more information about transaction "
-"handling and demarcation. The previous example also skipped any error "
-"handling and rollback."
-msgstr ""
-"关于事务处理及事务边界界定的详细信息,请参看 <xref linkend=\"transactions\" /"
-"> 。在上面的例子中,我们也忽略了所有的错误与回滚的处理。"
+#, no-c-format
+msgid "See <xref linkend=\"transactions\" /> for more information about transaction handling and demarcation. The previous example also skipped any error handling and rollback."
+msgstr "关于事务处理及事务边界界定的详细信息,请参看 <xref linkend=\"transactions\" /> 。在上面的例子中,我们也忽略了所有的错误与回滚的处理。 "
#. Tag: para
#, no-c-format
-msgid ""
-"To run this, we will make use of the Maven exec plugin to call our class "
-"with the necessary classpath setup: <command>mvn exec:java -Dexec.mainClass="
-"\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"store\"</command>"
-msgstr ""
-"要运行它,我们将使用 Maven exec 插件以及必要的 classpath 设置来进行调用:"
-"<command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager"
-"\" -Dexec.args=\"store\"</command>。"
+msgid "To run this, we will make use of the Maven exec plugin to call our class with the necessary classpath setup: <command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"store\"</command>"
+msgstr "要运行它,我们将使用 Maven exec 插件以及必要的 classpath 设置来进行调用:<command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"store\"</command>。"
#. Tag: para
#, no-c-format
@@ -854,12 +357,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You should see Hibernate starting up and, depending on your configuration, "
-"lots of log output. Towards the end, the following line will be displayed:"
-msgstr ""
-"你应该会看到,编译以后,Hibernate 根据你的配置启动,并产生一大堆的输出日志。"
-"在日志最后你会看到下面这行: "
+msgid "You should see Hibernate starting up and, depending on your configuration, lots of log output. Towards the end, the following line will be displayed:"
+msgstr "你应该会看到,编译以后,Hibernate 根据你的配置启动,并产生一大堆的输出日志。在日志最后你会看到下面这行: "
#. Tag: para
#, no-c-format
@@ -869,8 +368,7 @@
#. Tag: para
#, no-c-format
msgid "To list stored events an option is added to the main method:"
-msgstr ""
-"我们想要列出所有已经被存储的 events,就要增加一个条件分支选项到 main 方法中:"
+msgstr "我们想要列出所有已经被存储的 events,就要增加一个条件分支选项到 main 方法中:"
#. Tag: para
#, no-c-format
@@ -878,30 +376,14 @@
msgstr "我们也增加一个新的 <literal>listEvents()</literal> 方法: "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"Here, we are using a Hibernate Query Language (HQL) query to load all "
-"existing <literal>Event</literal> objects from the database. Hibernate will "
-"generate the appropriate SQL, send it to the database and populate "
-"<literal>Event</literal> objects with the data. You can create more complex "
-"queries with HQL. See <xref linkend=\"queryhql\" /> for more information."
-msgstr ""
-"我们在这里是用一个 HQL(Hibernate Query Language-Hibernate查询语言)查询语句"
-"来从数据库中加载所有存在的 <literal>Event</literal> 对象。Hibernate 会生成适"
-"当的 SQL,把它发送到数据库,并操作从查询得到数据的 <literal>Event</literal> "
-"对象。当然,你可以使用 HQL 来创建更加复杂的查询。更多信息请参考 <xref "
-"linkend=\"queryhql\"/>。"
+#, no-c-format
+msgid "Here, we are using a Hibernate Query Language (HQL) query to load all existing <literal>Event</literal> objects from the database. Hibernate will generate the appropriate SQL, send it to the database and populate <literal>Event</literal> objects with the data. You can create more complex queries with HQL. See <xref linkend=\"queryhql\" /> for more information."
+msgstr "我们在这里是用一个 HQL(Hibernate Query Language-Hibernate查询语言)查询语句来从数据库中加载所有存在的 <literal>Event</literal> 对象。Hibernate 会生成适当的 SQL,把它发送到数据库,并操作从查询得到数据的 <literal>Event</literal> 对象。当然,你可以使用 HQL 来创建更加复杂的查询。更多信息请参考 <xref linkend=\"queryhql\"/>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Now we can call our new functionality, again using the Maven exec plugin: "
-"<command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager"
-"\" -Dexec.args=\"list\"</command>"
-msgstr ""
-"现在我们可以再次用 Maven exec plugin - <command>mvn exec:java -Dexec."
-"mainClass=\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"list\"</"
-"command> 调用新的功能了。"
+msgid "Now we can call our new functionality, again using the Maven exec plugin: <command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"list\"</command>"
+msgstr "现在我们可以再次用 Maven exec plugin - <command>mvn exec:java -Dexec.mainClass=\"org.hibernate.tutorial.EventManager\" -Dexec.args=\"list\"</command> 调用新的功能了。"
#. Tag: title
#, no-c-format
@@ -910,16 +392,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"So far we have mapped a single persistent entity class to a table in "
-"isolation. Let's expand on that a bit and add some class associations. We "
-"will add people to the application and store a list of events in which they "
-"participate."
-msgstr ""
-"我们已经映射了一个持久化实体类到表上。让我们在这个基础上增加一些类之间的关"
-"联。首先我们往应用程序里增加人(people)的概念,并存储他们所参与的一个 Event "
-"列表。(译者注:与 Event 一样,我们在后面将直接使用 person 来表示“人”而不是它"
-"的中文翻译) "
+msgid "So far we have mapped a single persistent entity class to a table in isolation. Let's expand on that a bit and add some class associations. We will add people to the application and store a list of events in which they participate."
+msgstr "我们已经映射了一个持久化实体类到表上。让我们在这个基础上增加一些类之间的关联。首先我们往应用程序里增加人(people)的概念,并存储他们所参与的一个 Event 列表。(译者注:与 Event 一样,我们在后面将直接使用 person 来表示“人”而不是它的中文翻译) "
#. Tag: title
#, no-c-format
@@ -933,21 +407,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Save this to a file named <filename>src/main/java/org/hibernate/tutorial/"
-"domain/Person.java</filename>"
-msgstr ""
-"把它保存为文件 <filename>src/main/java/org/hibernate/tutorial/domain/Person."
-"java</filename>。"
+msgid "Save this to a file named <filename>src/main/java/org/hibernate/tutorial/domain/Person.java</filename>"
+msgstr "把它保存为文件 <filename>src/main/java/org/hibernate/tutorial/domain/Person.java</filename>。"
#. Tag: para
#, no-c-format
-msgid ""
-"Next, create the new mapping file as <filename>src/main/resources/org/"
-"hibernate/tutorial/domain/Person.hbm.xml</filename>"
-msgstr ""
-"然后,创建新的映射文件 <filename>src/main/resources/org/hibernate/tutorial/"
-"domain/Person.hbm.xml</filename>。"
+msgid "Next, create the new mapping file as <filename>src/main/resources/org/hibernate/tutorial/domain/Person.hbm.xml</filename>"
+msgstr "然后,创建新的映射文件 <filename>src/main/resources/org/hibernate/tutorial/domain/Person.hbm.xml</filename>。"
#. Tag: para
#, no-c-format
@@ -956,14 +422,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Create an association between these two entities. Persons can participate in "
-"events, and events have participants. The design questions you have to deal "
-"with are: directionality, multiplicity, and collection behavior."
-msgstr ""
-"现在我们在这两个实体之间创建一个关联。显然,persons 可以参与一系列 events,"
-"而 events 也有不同的参加者(persons)。我们需要处理的设计问题是关联方向"
-"(directionality),阶数(multiplicity)和集合(collection)的行为。 "
+msgid "Create an association between these two entities. Persons can participate in events, and events have participants. The design questions you have to deal with are: directionality, multiplicity, and collection behavior."
+msgstr "现在我们在这两个实体之间创建一个关联。显然,persons 可以参与一系列 events,而 events 也有不同的参加者(persons)。我们需要处理的设计问题是关联方向(directionality),阶数(multiplicity)和集合(collection)的行为。 "
#. Tag: title
#, no-c-format
@@ -972,65 +432,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"By adding a collection of events to the <literal>Person</literal> class, you "
-"can easily navigate to the events for a particular person, without executing "
-"an explicit query - by calling <literal>Person#getEvents</literal>. Multi-"
-"valued associations are represented in Hibernate by one of the Java "
-"Collection Framework contracts; here we choose a <interfacename>java.util."
-"Set</interfacename> because the collection will not contain duplicate "
-"elements and the ordering is not relevant to our examples:"
-msgstr ""
-"我们将向 <literal>Person</literal> 类增加一连串的 events。那样,通过调用 "
-"<literal>aPerson.getEvents()</literal>,就可以轻松地导航到特定 person 所参与"
-"的 events,而不用去执行一个显式的查询。我们使用 Java 的集合类(collection):"
-"<literal>Set</literal>,因为 set 不包含重复的元素及与我们无关的排序。 "
+msgid "By adding a collection of events to the <literal>Person</literal> class, you can easily navigate to the events for a particular person, without executing an explicit query - by calling <literal>Person#getEvents</literal>. Multi-valued associations are represented in Hibernate by one of the Java Collection Framework contracts; here we choose a <interfacename>java.util.Set</interfacename> because the collection will not contain duplicate elements and the ordering is not relevant to our examples:"
+msgstr "我们将向 <literal>Person</literal> 类增加一连串的 events。那样,通过调用 <literal>aPerson.getEvents()</literal>,就可以轻松地导航到特定 person 所参与的 events,而不用去执行一个显式的查询。我们使用 Java 的集合类(collection):<literal>Set</literal>,因为 set 不包含重复的元素及与我们无关的排序。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Before mapping this association, let's consider the other side. We could "
-"just keep this unidirectional or create another collection on the "
-"<literal>Event</literal>, if we wanted to be able to navigate it from both "
-"directions. This is not necessary, from a functional perspective. You can "
-"always execute an explicit query to retrieve the participants for a "
-"particular event. This is a design choice left to you, but what is clear "
-"from this discussion is the multiplicity of the association: \"many\" valued "
-"on both sides is called a <emphasis>many-to-many</emphasis> association. "
-"Hence, we use Hibernate's many-to-many mapping:"
-msgstr ""
-"在映射这个关联之前,先考虑一下此关联的另外一端。很显然,我们可以保持这个关联"
-"是单向的。或者,我们可以在 <literal>Event</literal> 里创建另外一个集合,如果"
-"希望能够双向地导航,如:<literal>anEvent.getParticipants()</literal>。从功能"
-"的角度来说,这并不是必须的。因为你总可以显式地执行一个查询,以获得某个特定 "
-"event 的所有参与者。这是个在设计时需要做出的选择,完全由你来决定,但此讨论中"
-"关于关联的阶数是清楚的:即两端都是“多”值的,我们把它叫做<emphasis>多对多"
-"(many-to-many)</emphasis>关联。因而,我们使用 Hibernate 的多对多映射: "
+msgid "Before mapping this association, let's consider the other side. We could just keep this unidirectional or create another collection on the <literal>Event</literal>, if we wanted to be able to navigate it from both directions. This is not necessary, from a functional perspective. You can always execute an explicit query to retrieve the participants for a particular event. This is a design choice left to you, but what is clear from this discussion is the multiplicity of the association: \"many\" valued on both sides is called a <emphasis>many-to-many</emphasis> association. Hence, we use Hibernate's many-to-many mapping:"
+msgstr "在映射这个关联之前,先考虑一下此关联的另外一端。很显然,我们可以保持这个关联是单向的。或者,我们可以在 <literal>Event</literal> 里创建另外一个集合,如果希望能够双向地导航,如:<literal>anEvent.getParticipants()</literal>。从功能的角度来说,这并不是必须的。因为你总可以显式地执行一个查询,以获得某个特定 event 的所有参与者。这是个在设计时需要做出的选择,完全由你来决定,但此讨论中关于关联的阶数是清楚的:即两端都是“多”值的,我们把它叫做<emphasis>多对多(many-to-many)</emphasis>关联。因而,我们使用 Hibernate 的多对多映射: "
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate supports a broad range of collection mappings, a <literal>set</"
-"literal> being most common. For a many-to-many association, or <emphasis>n:"
-"m</emphasis> entity relationship, an association table is required. Each row "
-"in this table represents a link between a person and an event. The table "
-"name is decalred using the <literal>table</literal> attribute of the "
-"<literal>set</literal> element. The identifier column name in the "
-"association, for the person side, is defined with the <literal>key</literal> "
-"element, the column name for the event's side with the <literal>column</"
-"literal> attribute of the <literal>many-to-many</literal>. You also have to "
-"tell Hibernate the class of the objects in your collection (the class on the "
-"other side of the collection of references)."
-msgstr ""
-"Hibernate 支持各种各样的集合映射,<literal><set></literal> 使用的最为普"
-"遍。对于多对多关联(或叫 <emphasis>n:m</emphasis> 实体关系), 需要一个关联表"
-"(association table)。<literal>表</literal>里面的每一行代表从 person 到 "
-"event 的一个关联。表名是由 <literal>set</literal> 元素的 <literal>table</"
-"literal> 属性配置的。关联里面的标识符字段名,对于 person 的一端,是由 "
-"<literal><key></literal> 元素定义,而 event 一端的字段名是由 "
-"<literal><many-to-many></literal> 元素的 <literal>column</literal> 属性"
-"定义。你也必须告诉 Hibernate 集合中对象的类(也就是位于这个集合所代表的关联另"
-"外一端的类)。 "
+msgid "Hibernate supports a broad range of collection mappings, a <literal>set</literal> being most common. For a many-to-many association, or <emphasis>n:m</emphasis> entity relationship, an association table is required. Each row in this table represents a link between a person and an event. The table name is decalred using the <literal>table</literal> attribute of the <literal>set</literal> element. The identifier column name in the association, for the person side, is defined with the <literal>key</literal> element, the column name for the event's side with the <literal>column</literal> attribute of the <literal>many-to-many</literal>. You also have to tell Hibernate the class of the objects in your collection (the class on the other side of the collection of references)."
+msgstr "Hibernate 支持各种各样的集合映射,<literal><set></literal> 使用的最为普遍。对于多对多关联(或叫 <emphasis>n:m</emphasis> 实体关系), 需要一个关联表(association table)。<literal>表</literal>里面的每一行代表从 person 到 event 的一个关联。表名是由 <literal>set</literal> 元素的 <literal>table</literal> 属性配置的。关联里面的标识符字段名,对于 person 的一端,是由 <literal><key></literal> 元素定义,而 event 一端的字段名是由 <literal><many-to-many></literal> 元素的 <literal>column</literal> 属性定义。你也必须告诉 Hibernate 集合中对象的类(也就是位于这个集合所代表的关联另外一端的类)。 "
#. Tag: para
#, no-c-format
@@ -1044,122 +457,38 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Now we will bring some people and events together in a new method in "
-"<literal>EventManager</literal>:"
-msgstr ""
-"我们把一些 people 和 events 一起放到 <literal>EventManager</literal> 的新方法"
-"中: "
+msgid "Now we will bring some people and events together in a new method in <literal>EventManager</literal>:"
+msgstr "我们把一些 people 和 events 一起放到 <literal>EventManager</literal> 的新方法中: "
#. Tag: para
#, no-c-format
-msgid ""
-"After loading a <literal>Person</literal> and an <literal>Event</literal>, "
-"simply modify the collection using the normal collection methods. There is "
-"no explicit call to <literal>update()</literal> or <literal>save()</"
-"literal>; Hibernate automatically detects that the collection has been "
-"modified and needs to be updated. This is called <emphasis>automatic dirty "
-"checking</emphasis>. You can also try it by modifying the name or the date "
-"property of any of your objects. As long as they are in "
-"<emphasis>persistent</emphasis> state, that is, bound to a particular "
-"Hibernate <interfacename>org.hibernate.Session</interfacename>, Hibernate "
-"monitors any changes and executes SQL in a write-behind fashion. The process "
-"of synchronizing the memory state with the database, usually only at the end "
-"of a unit of work, is called <emphasis>flushing</emphasis>. In our code, the "
-"unit of work ends with a commit, or rollback, of the database transaction."
-msgstr ""
-"在加载一 <literal>Person</literal> 和 <literal>Event</literal> 后,使用普通的"
-"集合方法就可容易地修改我们定义的集合。如你所见,没有显式的 <literal>update()"
-"</literal> 或 <literal>save()</literal>,Hibernate 会自动检测到集合已经被修改"
-"并需要更新回数据库。这叫做自动脏检查(<emphasis>automatic dirty checking</"
-"emphasis>),你也可以尝试修改任何对象的 name 或者 date 属性,只要他们处于"
-"<emphasis>持久化</emphasis>状态,也就是被绑定到某个 Hibernate 的 "
-"<literal>Session</literal> 上(如:他们刚刚在一个单元操作被加载或者保存),"
-"Hibernate 监视任何改变并在后台隐式写的方式执行 SQL。同步内存状态和数据库的过"
-"程,通常只在单元操作结束的时候发生,称此过程为清理缓存<emphasis>(flushing)"
-"</emphasis>。在我们的代码中,工作单元由数据库事务的提交(或者回滚)来结束——这"
-"是由 <literal>CurrentSessionContext</literal> 类的 <literal>thread</literal> "
-"配置选项定义的。 "
+msgid "After loading a <literal>Person</literal> and an <literal>Event</literal>, simply modify the collection using the normal collection methods. There is no explicit call to <literal>update()</literal> or <literal>save()</literal>; Hibernate automatically detects that the collection has been modified and needs to be updated. This is called <emphasis>automatic dirty checking</emphasis>. You can also try it by modifying the name or the date property of any of your objects. As long as they are in <emphasis>persistent</emphasis> state, that is, bound to a particular Hibernate <interfacename>org.hibernate.Session</interfacename>, Hibernate monitors any changes and executes SQL in a write-behind fashion. The process of synchronizing the memory state with the database, usually only at the end of a unit of work, is called <emphasis>flushing</emphasis>. In our code, the unit of work ends with a commit, or rollback, of the database transaction."
+msgstr "在加载一 <literal>Person</literal> 和 <literal>Event</literal> 后,使用普通的集合方法就可容易地修改我们定义的集合。如你所见,没有显式的 <literal>update()</literal> 或 <literal>save()</literal>,Hibernate 会自动检测到集合已经被修改并需要更新回数据库。这叫做自动脏检查(<emphasis>automatic dirty checking</emphasis>),你也可以尝试修改任何对象的 name 或者 date 属性,只要他们处于<emphasis>持久化</emphasis>状态,也就是被绑定到某个 Hibernate 的 <literal>Session</literal> 上(如:他们刚刚在一个单元操作被加载或者保存),Hibernate 监视任何改变并在后台隐式写的方式执行 SQL。同步内存状态和数据库的过程,通常只在单元操作结束的时候发生,称此过程为清理缓存<emphasis>(flushing)</emphasis>。在我们的代码中,工作单元由数据库事务的提交(或者回滚)来结束——!
这是由 <literal>CurrentSessionContext</literal> 类的 <literal>thread</literal> 配置选项定义的。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can load person and event in different units of work. Or you can modify "
-"an object outside of a <interfacename>org.hibernate.Session</interfacename>, "
-"when it is not in persistent state (if it was persistent before, this state "
-"is called <emphasis>detached</emphasis>). You can even modify a collection "
-"when it is detached:"
-msgstr ""
-"当然,你也可以在不同的单元操作里面加载 person 和 event。或在 "
-"<literal>Session</literal> 以外修改不是处在持久化(persistent)状态下的对象"
-"(如果该对象以前曾经被持久化,那么我们称这个状态为<emphasis>脱管(detached)"
-"</emphasis>)。你甚至可以在一个集合被脱管时修改它: "
+msgid "You can load person and event in different units of work. Or you can modify an object outside of a <interfacename>org.hibernate.Session</interfacename>, when it is not in persistent state (if it was persistent before, this state is called <emphasis>detached</emphasis>). You can even modify a collection when it is detached:"
+msgstr "当然,你也可以在不同的单元操作里面加载 person 和 event。或在 <literal>Session</literal> 以外修改不是处在持久化(persistent)状态下的对象(如果该对象以前曾经被持久化,那么我们称这个状态为<emphasis>脱管(detached)</emphasis>)。你甚至可以在一个集合被脱管时修改它: "
#. Tag: para
#, no-c-format
-msgid ""
-"The call to <literal>update</literal> makes a detached object persistent "
-"again by binding it to a new unit of work, so any modifications you made to "
-"it while detached can be saved to the database. This includes any "
-"modifications (additions/deletions) you made to a collection of that entity "
-"object."
-msgstr ""
-"对 <literal>update</literal> 的调用使一个脱管对象重新持久化,你可以说它被绑定"
-"到一个新的单元操作上,所以在脱管状态下对它所做的任何修改都会被保存到数据库"
-"里。这也包括你对这个实体对象的集合所作的任何改动(增加/删除)。 "
+msgid "The call to <literal>update</literal> makes a detached object persistent again by binding it to a new unit of work, so any modifications you made to it while detached can be saved to the database. This includes any modifications (additions/deletions) you made to a collection of that entity object."
+msgstr "对 <literal>update</literal> 的调用使一个脱管对象重新持久化,你可以说它被绑定到一个新的单元操作上,所以在脱管状态下对它所做的任何修改都会被保存到数据库里。这也包括你对这个实体对象的集合所作的任何改动(增加/删除)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This is not much use in our example, but it is an important concept you can "
-"incorporate into your own application. Complete this exercise by adding a "
-"new action to the main method of the <literal>EventManager</literal> and "
-"call it from the command line. If you need the identifiers of a person and "
-"an event - the <literal>save()</literal> method returns it (you might have "
-"to modify some of the previous methods to return that identifier):"
-msgstr ""
-"这对我们当前的情形不是很有用,但它是非常重要的概念,你可以把它融入到你自己的"
-"应用程序设计中。在<literal>EventManager</literal>的 main 方法中添加一个新的动"
-"作,并从命令行运行它来完成我们所做的练习。如果你需要 person 及 event 的标识"
-"符 — 那就用 <literal>save()</literal> 方法返回它(你可能需要修改前面的一些方"
-"法来返回那个标识符): "
+msgid "This is not much use in our example, but it is an important concept you can incorporate into your own application. Complete this exercise by adding a new action to the main method of the <literal>EventManager</literal> and call it from the command line. If you need the identifiers of a person and an event - the <literal>save()</literal> method returns it (you might have to modify some of the previous methods to return that identifier):"
+msgstr "这对我们当前的情形不是很有用,但它是非常重要的概念,你可以把它融入到你自己的应用程序设计中。在<literal>EventManager</literal>的 main 方法中添加一个新的动作,并从命令行运行它来完成我们所做的练习。如果你需要 person 及 event 的标识符 — 那就用 <literal>save()</literal> 方法返回它(你可能需要修改前面的一些方法来返回那个标识符): "
#. Tag: para
#, no-c-format
-msgid ""
-"This is an example of an association between two equally important classes : "
-"two entities. As mentioned earlier, there are other classes and types in a "
-"typical model, usually \"less important\". Some you have already seen, like "
-"an <literal>int</literal> or a <classname>java.lang.String</classname>. We "
-"call these classes <emphasis>value types</emphasis>, and their instances "
-"<emphasis>depend</emphasis> on a particular entity. Instances of these types "
-"do not have their own identity, nor are they shared between entities. Two "
-"persons do not reference the same <literal>firstname</literal> object, even "
-"if they have the same first name. Value types cannot only be found in the "
-"JDK , but you can also write dependent classes yourself such as an "
-"<literal>Address</literal> or <literal>MonetaryAmount</literal> class. In "
-"fact, in a Hibernate application all JDK classes are considered value types."
-msgstr ""
-"上面是个关于两个同等重要的实体类间关联的例子。像前面所提到的那样,在特定的模"
-"型中也存在其它的类和类型,这些类和类型通常是“次要的”。你已看到过其中的一些,"
-"像 <literal>int</literal> 或 <literal>String</literal>。我们称这些类为"
-"<emphasis>值类型(value type)</emphasis>,它们的实例<emphasis>依赖(depend)"
-"</emphasis>在某个特定的实体上。这些类型的实例没有它们自己的标识(identity),"
-"也不能在实体间被共享(比如,两个 person 不能引用同一个 <literal>firstname</"
-"literal> 对象,即使他们有相同的 first name)。当然,值类型并不仅仅在 JDK 中存"
-"在(事实上,在一个 Hibernate 应用程序中,所有的 JDK 类都被视为值类型),而且"
-"你也可以编写你自己的依赖类,例如 <literal>Address</literal>,"
-"<literal>MonetaryAmount</literal>。 "
+msgid "This is an example of an association between two equally important classes : two entities. As mentioned earlier, there are other classes and types in a typical model, usually \"less important\". Some you have already seen, like an <literal>int</literal> or a <classname>java.lang.String</classname>. We call these classes <emphasis>value types</emphasis>, and their instances <emphasis>depend</emphasis> on a particular entity. Instances of these types do not have their own identity, nor are they shared between entities. Two persons do not reference the same <literal>firstname</literal> object, even if they have the same first name. Value types cannot only be found in the JDK , but you can also write dependent classes yourself such as an <literal>Address</literal> or <literal>MonetaryAmount</literal> class. In fact, in a Hibernate application all JDK classes are considered value types."
+msgstr "上面是个关于两个同等重要的实体类间关联的例子。像前面所提到的那样,在特定的模型中也存在其它的类和类型,这些类和类型通常是“次要的”。你已看到过其中的一些,像 <literal>int</literal> 或 <literal>String</literal>。我们称这些类为<emphasis>值类型(value type)</emphasis>,它们的实例<emphasis>依赖(depend)</emphasis>在某个特定的实体上。这些类型的实例没有它们自己的标识(identity),也不能在实体间被共享(比如,两个 person 不能引用同一个 <literal>firstname</literal> 对象,即使他们有相同的 first name)。当然,值类型并不仅仅在 JDK 中存在(事实上,在一个 Hibernate 应用程序中,所有的 JDK 类都被视为值类型),而且你也可以编写你自己的依赖类,例如 <literal>Address</literal>,<literal>MonetaryAmount</literal>。 "
#. Tag: para
#, no-c-format
-msgid ""
-"You can also design a collection of value types. This is conceptually "
-"different from a collection of references to other entities, but looks "
-"almost the same in Java."
-msgstr ""
-"你也可以设计一个值类型的集合,这在概念上与引用其它实体的集合有很大的不同,但"
-"是在 Java 里面看起来几乎是一样的。 "
+msgid "You can also design a collection of value types. This is conceptually different from a collection of references to other entities, but looks almost the same in Java."
+msgstr "你也可以设计一个值类型的集合,这在概念上与引用其它实体的集合有很大的不同,但是在 Java 里面看起来几乎是一样的。 "
#. Tag: title
#, no-c-format
@@ -1168,14 +497,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Let's add a collection of email addresses to the <literal>Person</literal> "
-"entity. This will be represented as a <interfacename>java.util.Set</"
-"interfacename> of <classname>java.lang.String</classname> instances:"
-msgstr ""
-"让我们在 <literal>Person</literal> 实体里添加一个电子邮件的集合。这将以 "
-"<classname>java.lang.String</classname> 实例的 <interfacename>java.util.Set</"
-"interfacename> 出现:"
+msgid "Let's add a collection of email addresses to the <literal>Person</literal> entity. This will be represented as a <interfacename>java.util.Set</interfacename> of <classname>java.lang.String</classname> instances:"
+msgstr "让我们在 <literal>Person</literal> 实体里添加一个电子邮件的集合。这将以 <classname>java.lang.String</classname> 实例的 <interfacename>java.util.Set</interfacename> 出现:"
#. Tag: para
#, no-c-format
@@ -1184,26 +507,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"The difference compared with the earlier mapping is the use of the "
-"<literal>element</literal> part which tells Hibernate that the collection "
-"does not contain references to another entity, but is rather a collection "
-"whose elements are values types, here specifically of type <literal>string</"
-"literal>. The lowercase name tells you it is a Hibernate mapping type/"
-"converter. Again the <literal>table</literal> attribute of the <literal>set</"
-"literal> element determines the table name for the collection. The "
-"<literal>key</literal> element defines the foreign-key column name in the "
-"collection table. The <literal>column</literal> attribute in the "
-"<literal>element</literal> element defines the column name where the email "
-"address values will actually be stored."
-msgstr ""
-"比较这次和此前映射的差别,主要在于 <literal>element</literal> 部分,这次并没"
-"有包含对其它实体引用的集合,而是元素类型为 <literal>String</literal> 的集合"
-"(在映射中使用小写的名字”string“是向你表明它是一个 Hibernate 的映射类型或者类"
-"型转换器)。和之前一样,<literal>set</literal> 元素的 <literal>table</"
-"literal> 属性决定了用于集合的表名。<literal>key</literal> 元素定义了在集合表"
-"中外键的字段名。<literal>element</literal> 元素的 <literal>column</literal> "
-"属性定义用于实际保存 <literal>String</literal> 值的字段名。 "
+msgid "The difference compared with the earlier mapping is the use of the <literal>element</literal> part which tells Hibernate that the collection does not contain references to another entity, but is rather a collection whose elements are values types, here specifically of type <literal>string</literal>. The lowercase name tells you it is a Hibernate mapping type/converter. Again the <literal>table</literal> attribute of the <literal>set</literal> element determines the table name for the collection. The <literal>key</literal> element defines the foreign-key column name in the collection table. The <literal>column</literal> attribute in the <literal>element</literal> element defines the column name where the email address values will actually be stored."
+msgstr "比较这次和此前映射的差别,主要在于 <literal>element</literal> 部分,这次并没有包含对其它实体引用的集合,而是元素类型为 <literal>String</literal> 的集合(在映射中使用小写的名字”string“是向你表明它是一个 Hibernate 的映射类型或者类型转换器)。和之前一样,<literal>set</literal> 元素的 <literal>table</literal> 属性决定了用于集合的表名。<literal>key</literal> 元素定义了在集合表中外键的字段名。<literal>element</literal> 元素的 <literal>column</literal> 属性定义用于实际保存 <literal>String</literal> 值的字段名。 "
#. Tag: para
#, no-c-format
@@ -1212,35 +517,18 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can see that the primary key of the collection table is in fact a "
-"composite key that uses both columns. This also implies that there cannot be "
-"duplicate email addresses per person, which is exactly the semantics we need "
-"for a set in Java."
-msgstr ""
-"你可以看到集合表的主键实际上是个复合主键,同时使用了两个字段。这也暗示了对于"
-"同一个 person 不能有重复的 email 地址,这正是 Java 里面使用 Set 时候所需要的"
-"语义(Set 里元素不能重复)。"
+msgid "You can see that the primary key of the collection table is in fact a composite key that uses both columns. This also implies that there cannot be duplicate email addresses per person, which is exactly the semantics we need for a set in Java."
+msgstr "你可以看到集合表的主键实际上是个复合主键,同时使用了两个字段。这也暗示了对于同一个 person 不能有重复的 email 地址,这正是 Java 里面使用 Set 时候所需要的语义(Set 里元素不能重复)。"
#. Tag: para
#, no-c-format
-msgid ""
-"You can now try to add elements to this collection, just like we did before "
-"by linking persons and events. It is the same code in Java:"
-msgstr ""
-"你现在可以试着把元素加入到这个集合,就像我们在之前关联 person 和 event 的那"
-"样。其实现的 Java 代码是相同的: "
+msgid "You can now try to add elements to this collection, just like we did before by linking persons and events. It is the same code in Java:"
+msgstr "你现在可以试着把元素加入到这个集合,就像我们在之前关联 person 和 event 的那样。其实现的 Java 代码是相同的: "
#. Tag: para
#, no-c-format
-msgid ""
-"This time we did not use a <emphasis>fetch</emphasis> query to initialize "
-"the collection. Monitor the SQL log and try to optimize this with an eager "
-"fetch."
-msgstr ""
-"这次我们没有使用 <emphasis>fetch</emphasis> 查询来初始化集合。因此,调用其 "
-"getter 方法会触发另一附加的 select 来初始化集合,这样我们才能把元素添加进去。"
-"检查 SQL log,试着通过预先抓取来优化它。 "
+msgid "This time we did not use a <emphasis>fetch</emphasis> query to initialize the collection. Monitor the SQL log and try to optimize this with an eager fetch."
+msgstr "这次我们没有使用 <emphasis>fetch</emphasis> 查询来初始化集合。因此,调用其 getter 方法会触发另一附加的 select 来初始化集合,这样我们才能把元素添加进去。检查 SQL log,试着通过预先抓取来优化它。 "
#. Tag: title
#, no-c-format
@@ -1249,68 +537,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Next you will map a bi-directional association. You will make the "
-"association between person and event work from both sides in Java. The "
-"database schema does not change, so you will still have many-to-many "
-"multiplicity."
-msgstr ""
-"接下来我们将映射双向关联(bi-directional association)— 在 Java 里让 person "
-"和 event 可以从关联的任何一端访问另一端。当然,数据库 schema 没有改变,我们仍"
-"然需要多对多的阶数。一个关系型数据库要比网络编程语言更加灵活,所以它并不需要"
-"任何像导航方向(navigation direction)的东西 — 数据可以用任何可能的方式进行查"
-"看和获取。 "
+msgid "Next you will map a bi-directional association. You will make the association between person and event work from both sides in Java. The database schema does not change, so you will still have many-to-many multiplicity."
+msgstr "接下来我们将映射双向关联(bi-directional association)— 在 Java 里让 person 和 event 可以从关联的任何一端访问另一端。当然,数据库 schema 没有改变,我们仍然需要多对多的阶数。一个关系型数据库要比网络编程语言更加灵活,所以它并不需要任何像导航方向(navigation direction)的东西 — 数据可以用任何可能的方式进行查看和获取。 "
#. Tag: para
#, no-c-format
-msgid ""
-"A relational database is more flexible than a network programming language, "
-"in that it does not need a navigation direction; data can be viewed and "
-"retrieved in any possible way."
-msgstr ""
-"关系型数据库比网络编程语言更为灵活,因为它不需要方向导航,其数据可以用任何可"
-"能的方式进行查看和提取。"
+msgid "A relational database is more flexible than a network programming language, in that it does not need a navigation direction; data can be viewed and retrieved in any possible way."
+msgstr "关系型数据库比网络编程语言更为灵活,因为它不需要方向导航,其数据可以用任何可能的方式进行查看和提取。"
#. Tag: para
#, no-c-format
-msgid ""
-"First, add a collection of participants to the <literal>Event</literal> "
-"class:"
-msgstr ""
-"首先,把一个参与者(person)的集合加入 <literal>Event</literal> 类中: "
+msgid "First, add a collection of participants to the <literal>Event</literal> class:"
+msgstr "首先,把一个参与者(person)的集合加入 <literal>Event</literal> 类中: "
#. Tag: para
#, no-c-format
-msgid ""
-"Now map this side of the association in <literal>Event.hbm.xml</literal>."
+msgid "Now map this side of the association in <literal>Event.hbm.xml</literal>."
msgstr "在 <literal>Event.hbm.xml</literal> 里面也映射这个关联。 "
#. Tag: para
#, no-c-format
-msgid ""
-"These are normal <literal>set</literal> mappings in both mapping documents. "
-"Notice that the column names in <literal>key</literal> and <literal>many-to-"
-"many</literal> swap in both mapping documents. The most important addition "
-"here is the <literal>inverse=\"true\"</literal> attribute in the "
-"<literal>set</literal> element of the <literal>Event</literal>'s collection "
-"mapping."
-msgstr ""
-"如你所见,两个映射文件里都有普通的 <literal>set</literal> 映射。注意在两个映"
-"射文件中,互换了 <literal>key</literal> 和 <literal>many-to-many</literal> 的"
-"字段名。这里最重要的是 <literal>Event</literal> 映射文件里增加了 "
-"<literal>set</literal> 元素的 <literal>inverse=\"true\"</literal> 属性。 "
+msgid "These are normal <literal>set</literal> mappings in both mapping documents. Notice that the column names in <literal>key</literal> and <literal>many-to-many</literal> swap in both mapping documents. The most important addition here is the <literal>inverse=\"true\"</literal> attribute in the <literal>set</literal> element of the <literal>Event</literal>'s collection mapping."
+msgstr "如你所见,两个映射文件里都有普通的 <literal>set</literal> 映射。注意在两个映射文件中,互换了 <literal>key</literal> 和 <literal>many-to-many</literal> 的字段名。这里最重要的是 <literal>Event</literal> 映射文件里增加了 <literal>set</literal> 元素的 <literal>inverse=\"true\"</literal> 属性。 "
#. Tag: para
#, no-c-format
-msgid ""
-"What this means is that Hibernate should take the other side, the "
-"<literal>Person</literal> class, when it needs to find out information about "
-"the link between the two. This will be a lot easier to understand once you "
-"see how the bi-directional link between our two entities is created."
-msgstr ""
-"这意味着在需要的时候,Hibernate 能在关联的另一端 — <literal>Person</literal> "
-"类得到两个实体间关联的信息。这将会极大地帮助你理解双向关联是如何在两个实体间"
-"被创建的。 "
+msgid "What this means is that Hibernate should take the other side, the <literal>Person</literal> class, when it needs to find out information about the link between the two. This will be a lot easier to understand once you see how the bi-directional link between our two entities is created."
+msgstr "这意味着在需要的时候,Hibernate 能在关联的另一端 — <literal>Person</literal> 类得到两个实体间关联的信息。这将会极大地帮助你理解双向关联是如何在两个实体间被创建的。 "
#. Tag: title
#, no-c-format
@@ -1319,73 +572,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"First, keep in mind that Hibernate does not affect normal Java semantics. "
-"How did we create a link between a <literal>Person</literal> and an "
-"<literal>Event</literal> in the unidirectional example? You add an instance "
-"of <literal>Event</literal> to the collection of event references, of an "
-"instance of <literal>Person</literal>. If you want to make this link bi-"
-"directional, you have to do the same on the other side by adding a "
-"<literal>Person</literal> reference to the collection in an <literal>Event</"
-"literal>. This process of \"setting the link on both sides\" is absolutely "
-"necessary with bi-directional links."
-msgstr ""
-"首先请记住,Hibernate 并不影响通常的 Java 语义。 在单向关联的例子中,我们是怎"
-"样在 <literal>Person</literal> 和 <literal>Event</literal> 之间创建联系的?我"
-"们把 <literal>Event</literal> 实例添加到 <literal>Person</literal> 实例内的 "
-"event 引用集合里。因此很显然,如果我们要让这个关联可以双向地工作,我们需要在"
-"另外一端做同样的事情 - 把 <literal>Person</literal> 实例加入 "
-"<literal>Event</literal> 类内的 Person 引用集合。这“在关联的两端设置联系”是完"
-"全必要的而且你都得这么做。 "
+msgid "First, keep in mind that Hibernate does not affect normal Java semantics. How did we create a link between a <literal>Person</literal> and an <literal>Event</literal> in the unidirectional example? You add an instance of <literal>Event</literal> to the collection of event references, of an instance of <literal>Person</literal>. If you want to make this link bi-directional, you have to do the same on the other side by adding a <literal>Person</literal> reference to the collection in an <literal>Event</literal>. This process of \"setting the link on both sides\" is absolutely necessary with bi-directional links."
+msgstr "首先请记住,Hibernate 并不影响通常的 Java 语义。 在单向关联的例子中,我们是怎样在 <literal>Person</literal> 和 <literal>Event</literal> 之间创建联系的?我们把 <literal>Event</literal> 实例添加到 <literal>Person</literal> 实例内的 event 引用集合里。因此很显然,如果我们要让这个关联可以双向地工作,我们需要在另外一端做同样的事情 - 把 <literal>Person</literal> 实例加入 <literal>Event</literal> 类内的 Person 引用集合。这“在关联的两端设置联系”是完全必要的而且你都得这么做。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Many developers program defensively and create link management methods to "
-"correctly set both sides (for example, in <literal>Person</literal>):"
-msgstr ""
-"许多开发人员防御式地编程,创建管理关联的方法来保证正确的设置了关联的两端,比"
-"如在 <literal>Person</literal> 里: "
+msgid "Many developers program defensively and create link management methods to correctly set both sides (for example, in <literal>Person</literal>):"
+msgstr "许多开发人员防御式地编程,创建管理关联的方法来保证正确的设置了关联的两端,比如在 <literal>Person</literal> 里: "
#. Tag: para
#, no-c-format
-msgid ""
-"The get and set methods for the collection are now protected. This allows "
-"classes in the same package and subclasses to still access the methods, but "
-"prevents everybody else from altering the collections directly. Repeat the "
-"steps for the collection on the other side."
-msgstr ""
-"注意现在对于集合的 get 和 set 方法的访问级别是 protected — 这允许在位于同一个"
-"包(package)中的类以及继承自这个类的子类可以访问这些方法,但禁止其他任何人的"
-"直接访问,避免了集合内容的混乱。你应尽可能地在另一端也把集合的访问级别设成 "
-"protected。 "
+msgid "The get and set methods for the collection are now protected. This allows classes in the same package and subclasses to still access the methods, but prevents everybody else from altering the collections directly. Repeat the steps for the collection on the other side."
+msgstr "注意现在对于集合的 get 和 set 方法的访问级别是 protected — 这允许在位于同一个包(package)中的类以及继承自这个类的子类可以访问这些方法,但禁止其他任何人的直接访问,避免了集合内容的混乱。你应尽可能地在另一端也把集合的访问级别设成 protected。 "
#. Tag: para
#, no-c-format
-msgid ""
-"What about the <literal>inverse</literal> mapping attribute? For you, and "
-"for Java, a bi-directional link is simply a matter of setting the references "
-"on both sides correctly. Hibernate, however, does not have enough "
-"information to correctly arrange SQL <literal>INSERT</literal> and "
-"<literal>UPDATE</literal> statements (to avoid constraint violations). "
-"Making one side of the association <literal>inverse</literal> tells "
-"Hibernate to consider it a <emphasis>mirror</emphasis> of the other side. "
-"That is all that is necessary for Hibernate to resolve any issues that arise "
-"when transforming a directional navigation model to a SQL database schema. "
-"The rules are straightforward: all bi-directional associations need one side "
-"as <literal>inverse</literal>. In a one-to-many association it has to be the "
-"many-side, and in many-to-many association you can select either side."
-msgstr ""
-"<literal>inverse</literal> 映射属性究竟表示什么呢?对于你和 Java 来说,一个双"
-"向关联仅仅是在两端简单地正确设置引用。然而,Hibernate 并没有足够的信息去正确"
-"地执行 <literal>INSERT</literal> 和 <literal>UPDATE</literal> 语句(以避免违"
-"反数据库约束),所以它需要一些帮助来正确的处理双向关联。把关联的一端设置为 "
-"<literal>inverse</literal> 将告诉 Hibernate 忽略关联的这一端,把这端看成是另"
-"外一端的一个<emphasis>镜象(mirror)</emphasis>。这就是所需的全部信息,"
-"Hibernate 利用这些信息来处理把一个有向导航模型转移到数据库 schema 时的所有问"
-"题。你只需要记住这个直观的规则:所有的双向关联需要有一端被设置为 "
-"<literal>inverse</literal>。在一对多关联中它必须是代表多(many)的那端。而在"
-"多对多(many-to-many)关联中,你可以任意选取一端,因为两端之间并没有差别。 "
+msgid "What about the <literal>inverse</literal> mapping attribute? For you, and for Java, a bi-directional link is simply a matter of setting the references on both sides correctly. Hibernate, however, does not have enough information to correctly arrange SQL <literal>INSERT</literal> and <literal>UPDATE</literal> statements (to avoid constraint violations). Making one side of the association <literal>inverse</literal> tells Hibernate to consider it a <emphasis>mirror</emphasis> of the other side. That is all that is necessary for Hibernate to resolve any issues that arise when transforming a directional navigation model to a SQL database schema. The rules are straightforward: all bi-directional associations need one side as <literal>inverse</literal>. In a one-to-many association it has to be the many-side, and in many-to-many association you can select either side."
+msgstr "<literal>inverse</literal> 映射属性究竟表示什么呢?对于你和 Java 来说,一个双向关联仅仅是在两端简单地正确设置引用。然而,Hibernate 并没有足够的信息去正确地执行 <literal>INSERT</literal> 和 <literal>UPDATE</literal> 语句(以避免违反数据库约束),所以它需要一些帮助来正确的处理双向关联。把关联的一端设置为 <literal>inverse</literal> 将告诉 Hibernate 忽略关联的这一端,把这端看成是另外一端的一个<emphasis>镜象(mirror)</emphasis>。这就是所需的全部信息,Hibernate 利用这些信息来处理把一个有向导航模型转移到数据库 schema 时的所有问题。你只需要记住这个直观的规则:所有的双向关联需要有一端被设置为 <literal>inverse</literal>。在一对多关联中它必须是代表多(many)的那端。而在多对多(many-to-many)关联中,你可以任意选取一端,因为两端之!
间并没有差别。 "
#. Tag: title
#, no-c-format
@@ -1394,18 +597,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A Hibernate web application uses <literal>Session</literal> and "
-"<literal>Transaction</literal> almost like a standalone application. "
-"However, some common patterns are useful. You can now write an "
-"<literal>EventManagerServlet</literal>. This servlet can list all events "
-"stored in the database, and it provides an HTML form to enter new events."
-msgstr ""
-"Hibernate web 应用程序使用 <literal>Session</literal> 和 "
-"<literal>Transaction</literal> 的方式几乎和独立应用程序是一样的。但是,有一些"
-"常见的模式(pattern)非常有用。现在我们编写一个 "
-"<literal>EventManagerServlet</literal>。这个 servlet 可以列出数据库中保存的所"
-"有的 events,还提供一个 HTML 表单来增加新的 events。 "
+msgid "A Hibernate web application uses <literal>Session</literal> and <literal>Transaction</literal> almost like a standalone application. However, some common patterns are useful. You can now write an <literal>EventManagerServlet</literal>. This servlet can list all events stored in the database, and it provides an HTML form to enter new events."
+msgstr "Hibernate web 应用程序使用 <literal>Session</literal> 和 <literal>Transaction</literal> 的方式几乎和独立应用程序是一样的。但是,有一些常见的模式(pattern)非常有用。现在我们编写一个 <literal>EventManagerServlet</literal>。这个 servlet 可以列出数据库中保存的所有的 events,还提供一个 HTML 表单来增加新的 events。 "
#. Tag: title
#, no-c-format
@@ -1414,80 +607,33 @@
#. Tag: para
#, no-c-format
-msgid ""
-"First we need create our basic processing servlet. Since our servlet only "
-"handles HTTP <literal>GET</literal> requests, we will only implement the "
-"<literal>doGet()</literal> method:"
-msgstr ""
-"这个 servlet 只处理 HTTP <literal>GET</literal> 请求,因此,我们要实现的是 "
-"<literal>doGet()</literal> 方法: "
+msgid "First we need create our basic processing servlet. Since our servlet only handles HTTP <literal>GET</literal> requests, we will only implement the <literal>doGet()</literal> method:"
+msgstr "这个 servlet 只处理 HTTP <literal>GET</literal> 请求,因此,我们要实现的是 <literal>doGet()</literal> 方法: "
#. Tag: para
#, no-c-format
-msgid ""
-"Save this servlet as <filename>src/main/java/org/hibernate/tutorial/web/"
-"EventManagerServlet.java</filename>"
-msgstr ""
-"把这个 servlet 保存为 <filename>src/main/java/org/hibernate/tutorial/web/"
-"EventManagerServlet.java</filename>。"
+msgid "Save this servlet as <filename>src/main/java/org/hibernate/tutorial/web/EventManagerServlet.java</filename>"
+msgstr "把这个 servlet 保存为 <filename>src/main/java/org/hibernate/tutorial/web/EventManagerServlet.java</filename>。"
#. Tag: para
#, no-c-format
-msgid ""
-"The pattern applied here is called <emphasis>session-per-request</emphasis>. "
-"When a request hits the servlet, a new Hibernate <literal>Session</literal> "
-"is opened through the first call to <literal>getCurrentSession()</literal> "
-"on the <literal>SessionFactory</literal>. A database transaction is then "
-"started. All data access occurs inside a transaction irrespective of whether "
-"the data is read or written. Do not use the auto-commit mode in applications."
-msgstr ""
-"我们称这里应用的模式为每次请求一个 session<emphasis>(session-per-request)</"
-"emphasis>。当有请求到达这个 servlet 的时候,通过对 <literal>SessionFactory</"
-"literal> 的第一次调用,打开一个新的 Hibernate <literal>Session</literal>。然"
-"后启动一个数据库事务 — 所有的数据访问都是在事务中进行,不管是读还是写(我们在"
-"应用程序中不使用 auto-commit 模式)。 "
+msgid "The pattern applied here is called <emphasis>session-per-request</emphasis>. When a request hits the servlet, a new Hibernate <literal>Session</literal> is opened through the first call to <literal>getCurrentSession()</literal> on the <literal>SessionFactory</literal>. A database transaction is then started. All data access occurs inside a transaction irrespective of whether the data is read or written. Do not use the auto-commit mode in applications."
+msgstr "我们称这里应用的模式为每次请求一个 session<emphasis>(session-per-request)</emphasis>。当有请求到达这个 servlet 的时候,通过对 <literal>SessionFactory</literal> 的第一次调用,打开一个新的 Hibernate <literal>Session</literal>。然后启动一个数据库事务 — 所有的数据访问都是在事务中进行,不管是读还是写(我们在应用程序中不使用 auto-commit 模式)。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Do <emphasis>not</emphasis> use a new Hibernate <literal>Session</literal> "
-"for every database operation. Use one Hibernate <literal>Session</literal> "
-"that is scoped to the whole request. Use <literal>getCurrentSession()</"
-"literal>, so that it is automatically bound to the current Java thread."
-msgstr ""
-"<emphasis>不要</emphasis>为每次数据库操作都使用一个新的 Hibernate "
-"<literal>Session</literal>。将 Hibernate <literal>Session</literal> 的范围设"
-"置为整个请求。要用 <literal>getCurrentSession()</literal>,这样它自动会绑定到"
-"当前 Java 线程。"
+msgid "Do <emphasis>not</emphasis> use a new Hibernate <literal>Session</literal> for every database operation. Use one Hibernate <literal>Session</literal> that is scoped to the whole request. Use <literal>getCurrentSession()</literal>, so that it is automatically bound to the current Java thread."
+msgstr "<emphasis>不要</emphasis>为每次数据库操作都使用一个新的 Hibernate <literal>Session</literal>。将 Hibernate <literal>Session</literal> 的范围设置为整个请求。要用 <literal>getCurrentSession()</literal>,这样它自动会绑定到当前 Java 线程。"
#. Tag: para
#, no-c-format
-msgid ""
-"Next, the possible actions of the request are processed and the response "
-"HTML is rendered. We will get to that part soon."
-msgstr ""
-"下一步,对请求的可能动作进行处理,渲染出反馈的 HTML。我们很快就会涉及到那部"
-"分。 "
+msgid "Next, the possible actions of the request are processed and the response HTML is rendered. We will get to that part soon."
+msgstr "下一步,对请求的可能动作进行处理,渲染出反馈的 HTML。我们很快就会涉及到那部分。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Finally, the unit of work ends when processing and rendering are complete. "
-"If any problems occurred during processing or rendering, an exception will "
-"be thrown and the database transaction rolled back. This completes the "
-"<literal>session-per-request</literal> pattern. Instead of the transaction "
-"demarcation code in every servlet, you could also write a servlet filter. "
-"See the Hibernate website and Wiki for more information about this pattern "
-"called <emphasis>Open Session in View</emphasis>. You will need it as soon "
-"as you consider rendering your view in JSP, not in a servlet."
-msgstr ""
-"最后,当处理与渲染都结束的时候,这个工作单元就结束了。假若在处理或渲染的时候"
-"有任何错误发生,会抛出一个异常,回滚数据库事务。这样,<literal>session-per-"
-"request</literal> 模式就完成了。为了避免在每个 servlet 中都编写事务边界界定的"
-"代码,可以考虑写一个 servlet 过滤器(filter)来更好地解决。关于这一模式的更多"
-"信息,请参阅 Hibernate 网站和 Wiki,这一模式叫做 <emphasis>Open Session in "
-"View</emphasis> — 只要你考虑用JSP来渲染你的视图(view),而不是在servlet中,"
-"你就会很快用到它。 "
+msgid "Finally, the unit of work ends when processing and rendering are complete. If any problems occurred during processing or rendering, an exception will be thrown and the database transaction rolled back. This completes the <literal>session-per-request</literal> pattern. Instead of the transaction demarcation code in every servlet, you could also write a servlet filter. See the Hibernate website and Wiki for more information about this pattern called <emphasis>Open Session in View</emphasis>. You will need it as soon as you consider rendering your view in JSP, not in a servlet."
+msgstr "最后,当处理与渲染都结束的时候,这个工作单元就结束了。假若在处理或渲染的时候有任何错误发生,会抛出一个异常,回滚数据库事务。这样,<literal>session-per-request</literal> 模式就完成了。为了避免在每个 servlet 中都编写事务边界界定的代码,可以考虑写一个 servlet 过滤器(filter)来更好地解决。关于这一模式的更多信息,请参阅 Hibernate 网站和 Wiki,这一模式叫做 <emphasis>Open Session in View</emphasis> — 只要你考虑用JSP来渲染你的视图(view),而不是在servlet中,你就会很快用到它。 "
#. Tag: title
#, no-c-format
@@ -1496,63 +642,28 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Now you can implement the processing of the request and the rendering of the "
-"page."
+msgid "Now you can implement the processing of the request and the rendering of the page."
msgstr "我们来实现处理请求以及渲染页面的工作。 "
#. Tag: para
#, no-c-format
-msgid ""
-"This coding style, with a mix of Java and HTML, would not scale in a more "
-"complex application—keep in mind that we are only illustrating basic "
-"Hibernate concepts in this tutorial. The code prints an HTML header and a "
-"footer. Inside this page, an HTML form for event entry and a list of all "
-"events in the database are printed. The first method is trivial and only "
-"outputs HTML:"
-msgstr ""
-"必须承认,这种编码风格把 Java 和 HTML 混在一起,在更复杂的应用程序里不应该大"
-"量使用 — 记住,在本章里我们仅仅是展示了 Hibernate 的基本概念。这段代码打印出"
-"了 HTML 页眉和页脚,在这个页面里,还打印了一个输入 events 条目的表单单并列出"
-"了数据库里的有的 events。第一个方法微不足道,仅仅是输出 HTML:"
+msgid "This coding style, with a mix of Java and HTML, would not scale in a more complex application—keep in mind that we are only illustrating basic Hibernate concepts in this tutorial. The code prints an HTML header and a footer. Inside this page, an HTML form for event entry and a list of all events in the database are printed. The first method is trivial and only outputs HTML:"
+msgstr "必须承认,这种编码风格把 Java 和 HTML 混在一起,在更复杂的应用程序里不应该大量使用 — 记住,在本章里我们仅仅是展示了 Hibernate 的基本概念。这段代码打印出了 HTML 页眉和页脚,在这个页面里,还打印了一个输入 events 条目的表单单并列出了数据库里的有的 events。第一个方法微不足道,仅仅是输出 HTML:"
#. Tag: para
#, no-c-format
-msgid ""
-"The <literal>listEvents()</literal> method uses the Hibernate "
-"<literal>Session</literal> bound to the current thread to execute a query:"
-msgstr ""
-"<literal>listEvents()</literal> 方法使用绑定到当前线程的 Hibernate "
-"<literal>Session</literal> 来执行查询:"
+msgid "The <literal>listEvents()</literal> method uses the Hibernate <literal>Session</literal> bound to the current thread to execute a query:"
+msgstr "<literal>listEvents()</literal> 方法使用绑定到当前线程的 Hibernate <literal>Session</literal> 来执行查询:"
#. Tag: para
#, no-c-format
-msgid ""
-"Finally, the <literal>store</literal> action is dispatched to the "
-"<literal>createAndStoreEvent()</literal> method, which also uses the "
-"<literal>Session</literal> of the current thread:"
-msgstr ""
-"最后,<literal>store</literal> 动作会被导向到 <literal>createAndStoreEvent()"
-"</literal> 方法,它也使用当前线程的 <literal>Session</literal>:"
+msgid "Finally, the <literal>store</literal> action is dispatched to the <literal>createAndStoreEvent()</literal> method, which also uses the <literal>Session</literal> of the current thread:"
+msgstr "最后,<literal>store</literal> 动作会被导向到 <literal>createAndStoreEvent()</literal> 方法,它也使用当前线程的 <literal>Session</literal>:"
#. Tag: para
#, no-c-format
-msgid ""
-"The servlet is now complete. A request to the servlet will be processed in a "
-"single <literal>Session</literal> and <literal>Transaction</literal>. As "
-"earlier in the standalone application, Hibernate can automatically bind "
-"these objects to the current thread of execution. This gives you the freedom "
-"to layer your code and access the <literal>SessionFactory</literal> in any "
-"way you like. Usually you would use a more sophisticated design and move the "
-"data access code into data access objects (the DAO pattern). See the "
-"Hibernate Wiki for more examples."
-msgstr ""
-"大功告成,这个 servlet 写完了。Hibernate 会在单一的 <literal>Session</"
-"literal> 和 <literal>Transaction</literal> 中处理到达的 servlet 请求。如同在"
-"前面的独立应用程序中那样,Hibernate 可以自动的把这些对象绑定到当前运行的线程"
-"中。这给了你用任何你喜欢的方式来对代码分层及访问 <literal>SessionFactory</"
-"literal> 的自由。通常,你会用更加完备的设计,把数据访问代码转移到数据访问对象"
-"中(DAO 模式)。请参见 Hibernate Wiki,那里有更多的例子。 "
+msgid "The servlet is now complete. A request to the servlet will be processed in a single <literal>Session</literal> and <literal>Transaction</literal>. As earlier in the standalone application, Hibernate can automatically bind these objects to the current thread of execution. This gives you the freedom to layer your code and access the <literal>SessionFactory</literal> in any way you like. Usually you would use a more sophisticated design and move the data access code into data access objects (the DAO pattern). See the Hibernate Wiki for more examples."
+msgstr "大功告成,这个 servlet 写完了。Hibernate 会在单一的 <literal>Session</literal> 和 <literal>Transaction</literal> 中处理到达的 servlet 请求。如同在前面的独立应用程序中那样,Hibernate 可以自动的把这些对象绑定到当前运行的线程中。这给了你用任何你喜欢的方式来对代码分层及访问 <literal>SessionFactory</literal> 的自由。通常,你会用更加完备的设计,把数据访问代码转移到数据访问对象中(DAO 模式)。请参见 Hibernate Wiki,那里有更多的例子。 "
#. Tag: title
#, no-c-format
@@ -1561,51 +672,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"To deploy this application for testing we must create a Web ARchive (WAR). "
-"First we must define the WAR descriptor as <filename>src/main/webapp/WEB-INF/"
-"web.xml</filename>"
-msgstr ""
-"要部署这个应用程序以进行测试,我们必须出具一个 Web ARchive (WAR)。首先我们必"
-"须定义 WAR 描述符为 <filename>src/main/webapp/WEB-INF/web.xml</filename>。"
+msgid "To deploy this application for testing we must create a Web ARchive (WAR). First we must define the WAR descriptor as <filename>src/main/webapp/WEB-INF/web.xml</filename>"
+msgstr "要部署这个应用程序以进行测试,我们必须出具一个 Web ARchive (WAR)。首先我们必须定义 WAR 描述符为 <filename>src/main/webapp/WEB-INF/web.xml</filename>。"
#. Tag: para
#, no-c-format
-msgid ""
-"To build and deploy call <literal>mvn package</literal> in your project "
-"directory and copy the <filename>hibernate-tutorial.war</filename> file into "
-"your Tomcat <filename>webapps</filename> directory."
-msgstr ""
-"在你的开发目录中,调用 <literal>ant war</literal> 来构建、打包,然后把 "
-"<literal>hibernate-tutorial.war</literal> 文件拷贝到你的 tomcat 的 "
-"<literal>webapps</literal> 目录下。假若你还没安装 Tomcat,就去下载一个,按照"
-"指南来安装。对此应用的发布,你不需要修改任何 Tomcat 的配置。 "
+msgid "To build and deploy call <literal>mvn package</literal> in your project directory and copy the <filename>hibernate-tutorial.war</filename> file into your Tomcat <filename>webapps</filename> directory."
+msgstr "在你的开发目录中,调用 <literal>ant war</literal> 来构建、打包,然后把 <literal>hibernate-tutorial.war</literal> 文件拷贝到你的 tomcat 的 <literal>webapps</literal> 目录下。假若你还没安装 Tomcat,就去下载一个,按照指南来安装。对此应用的发布,你不需要修改任何 Tomcat 的配置。 "
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"If you do not have Tomcat installed, download it from <ulink url=\"http://"
-"tomcat.apache.org/\" /> and follow the installation instructions. Our "
-"application requires no changes to the standard Tomcat configuration."
-msgstr ""
-"如果你还没有安装 Tomcat,请从 <ulink url=\"http://tomcat.apache.org/\" /> 下"
-"载并按照安装说明进行安装。我们的应用程序不需要对标准的 Tomcat 配置进行修改。"
+#, no-c-format
+msgid "If you do not have Tomcat installed, download it from <ulink url=\"http://tomcat.apache.org/\" /> and follow the installation instructions. Our application requires no changes to the standard Tomcat configuration."
+msgstr "如果你还没有安装 Tomcat,请从 <ulink url=\"http://tomcat.apache.org/\" /> 下载并按照安装说明进行安装。我们的应用程序不需要对标准的 Tomcat 配置进行修改。 "
#. Tag: para
#, no-c-format
-msgid ""
-"Once deployed and Tomcat is running, access the application at "
-"<literal>http://localhost:8080/hibernate-tutorial/eventmanager</literal>. "
-"Make sure you watch the Tomcat log to see Hibernate initialize when the "
-"first request hits your servlet (the static initializer in "
-"<literal>HibernateUtil</literal> is called) and to get the detailed output "
-"if any exceptions occurs."
-msgstr ""
-"在部署完,启动 Tomcat 之后,通过 <literal>http://localhost:8080/hibernate-"
-"tutorial/eventmanager</literal> 进行访问你的应用,在第一次 servlet 请求发生"
-"时,请在 Tomcat log 中确认你看到 Hibernate 被初始化了"
-"(<literal>HibernateUtil</literal> 的静态初始化器被调用),假若有任何异常抛"
-"出,也可以看到详细的输出。"
+msgid "Once deployed and Tomcat is running, access the application at <literal>http://localhost:8080/hibernate-tutorial/eventmanager</literal>. Make sure you watch the Tomcat log to see Hibernate initialize when the first request hits your servlet (the static initializer in <literal>HibernateUtil</literal> is called) and to get the detailed output if any exceptions occurs."
+msgstr "在部署完,启动 Tomcat 之后,通过 <literal>http://localhost:8080/hibernate-tutorial/eventmanager</literal> 进行访问你的应用,在第一次 servlet 请求发生时,请在 Tomcat log 中确认你看到 Hibernate 被初始化了(<literal>HibernateUtil</literal> 的静态初始化器被调用),假若有任何异常抛出,也可以看到详细的输出。"
#. Tag: title
#, no-c-format
@@ -1614,14 +697,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This tutorial covered the basics of writing a simple standalone Hibernate "
-"application and a small web application. More tutorials are available from "
-"the Hibernate <ulink url=\"http://hibernate.org\">website</ulink>."
-msgstr ""
-"本章覆盖了如何编写一个简单独立的 Hibernate 命令行应用程序及小型的 Hibernate "
-"web 应用程序的基本要素。更多的教程可以在 <ulink url=\"http://hibernate.org"
-"\">website</ulink> 上找到。"
+msgid "This tutorial covered the basics of writing a simple standalone Hibernate application and a small web application. More tutorials are available from the Hibernate <ulink url=\"http://hibernate.org\">website</ulink>."
+msgstr "本章覆盖了如何编写一个简单独立的 Hibernate 命令行应用程序及小型的 Hibernate web 应用程序的基本要素。更多的教程可以在 <ulink url=\"http://hibernate.org\">website</ulink> 上找到。"
#~ msgid ""
#~ "<![CDATA[<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
@@ -1719,7 +796,6 @@
#~ " </dependencies>\n"
#~ "\n"
#~ "</project>]]>"
-
#~ msgid ""
#~ "<![CDATA[package org.hibernate.tutorial.domain;\n"
#~ "\n"
@@ -1794,7 +870,6 @@
#~ " this.title = title;\n"
#~ " }\n"
#~ "}]]>"
-
#~ msgid ""
#~ "<![CDATA[<?xml version=\"1.0\"?>\n"
#~ "<!DOCTYPE hibernate-mapping PUBLIC\n"
@@ -1813,7 +888,6 @@
#~ "<hibernate-mapping package=\"org.hibernate.tutorial.domain\">\n"
#~ "[...]\n"
#~ "</hibernate-mapping>]]>"
-
#~ msgid ""
#~ "<![CDATA[<hibernate-mapping package=\"org.hibernate.tutorial.domain\">\n"
#~ "\n"
@@ -1830,7 +904,6 @@
#~ " </class>\n"
#~ "\n"
#~ "</hibernate-mapping>]]>"
-
#~ msgid ""
#~ "<![CDATA[<hibernate-mapping package=\"org.hibernate.tutorial.domain\">\n"
#~ "\n"
@@ -1851,7 +924,6 @@
#~ " </class>\n"
#~ "\n"
#~ "</hibernate-mapping>]]>"
-
#~ msgid ""
#~ "<![CDATA[\n"
#~ "<hibernate-mapping package=\"org.hibernate.tutorial.domain\">\n"
@@ -1880,7 +952,6 @@
#~ " </class>\n"
#~ "\n"
#~ "</hibernate-mapping>]]>"
-
#~ msgid ""
#~ "<![CDATA[<?xml version='1.0' encoding='utf-8'?>\n"
#~ "<!DOCTYPE hibernate-configuration PUBLIC\n"
@@ -2891,3 +1962,4 @@
#~ " <url-pattern>/eventmanager</url-pattern>\n"
#~ " </servlet-mapping>\n"
#~ "</web-app>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/content/xml.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/content/xml.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/content/xml.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -6,7 +6,7 @@
"Project-Id-Version: Collection_Mapping\n"
"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
"POT-Creation-Date: 2010-02-10T07:25:35\n"
-"PO-Revision-Date: 2009-12-07 21:18+1000\n"
+"PO-Revision-Date: 2010-03-16 10:02+1000\n"
"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: <en(a)li.org>\n"
"MIME-Version: 1.0\n"
@@ -20,13 +20,9 @@
msgstr "XML 映射"
#. Tag: para
-#, fuzzy, no-c-format
-msgid ""
-"<emphasis> XML Mapping is an experimental feature in Hibernate 3.0 and is "
-"currently under active development. </emphasis>"
-msgstr ""
-"<emphasis>注意 XML 映射只是 Hibernate 3.0 的一个实验性的特性。这一特性仍在积"
-"极开发中。</emphasis>"
+#, no-c-format
+msgid "<emphasis> XML Mapping is an experimental feature in Hibernate 3.0 and is currently under active development. </emphasis>"
+msgstr "<emphasis>注意 XML 映射只是 Hibernate 3.0 的一个实验性的特性。这一特性仍在积极开发中。</emphasis> "
#. Tag: title
#, no-c-format
@@ -35,51 +31,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate allows you to work with persistent XML data in much the same way "
-"you work with persistent POJOs. A parsed XML tree can be thought of as "
-"another way of representing the relational data at the object level, instead "
-"of POJOs."
-msgstr ""
-"Hibernate 使得你可以用 XML 数据来进行工作,恰如你用持久化的 POJO 进行工作那"
-"样。解析过的 XML 树 可以被认为是代替 POJO 的另外一种在对象层面上表示关系型数"
-"据的途径。"
+msgid "Hibernate allows you to work with persistent XML data in much the same way you work with persistent POJOs. A parsed XML tree can be thought of as another way of representing the relational data at the object level, instead of POJOs."
+msgstr "Hibernate 使得你可以用 XML 数据来进行工作,恰如你用持久化的 POJO 进行工作那样。解析过的 XML 树 可以被认为是代替 POJO 的另外一种在对象层面上表示关系型数据的途径。"
#. Tag: para
#, no-c-format
-msgid ""
-"Hibernate supports dom4j as API for manipulating XML trees. You can write "
-"queries that retrieve dom4j trees from the database and have any "
-"modification you make to the tree automatically synchronized to the "
-"database. You can even take an XML document, parse it using dom4j, and write "
-"it to the database with any of Hibernate's basic operations: <literal>persist"
-"(), saveOrUpdate(), merge(), delete(), replicate()</literal> (merging is not "
-"yet supported)."
-msgstr ""
-"Hibernate 支持采用 dom4j 作为操作 XML 树的 API。你可以写一些查询从数据库中检"
-"索出 dom4j 树,随后你对这颗树做的任何修改都将自动同步回数据库。你甚至可以用 "
-"dom4j 解析 一篇 XML 文档,然后使用 Hibernate 的任一基本操作将它写入数据库:"
-"<literal>persist(),saveOrUpdate(),merge(),delete(),replicate()</literal> "
-"(合并操作merge()目前还不支持)。"
+msgid "Hibernate supports dom4j as API for manipulating XML trees. You can write queries that retrieve dom4j trees from the database and have any modification you make to the tree automatically synchronized to the database. You can even take an XML document, parse it using dom4j, and write it to the database with any of Hibernate's basic operations: <literal>persist(), saveOrUpdate(), merge(), delete(), replicate()</literal> (merging is not yet supported)."
+msgstr "Hibernate 支持采用 dom4j 作为操作 XML 树的 API。你可以写一些查询从数据库中检索出 dom4j 树,随后你对这颗树做的任何修改都将自动同步回数据库。你甚至可以用 dom4j 解析 一篇 XML 文档,然后使用 Hibernate 的任一基本操作将它写入数据库:<literal>persist(),saveOrUpdate(),merge(),delete(),replicate()</literal> (合并操作merge()目前还不支持)。"
#. Tag: para
#, no-c-format
-msgid ""
-"This feature has many applications including data import/export, "
-"externalization of entity data via JMS or SOAP and XSLT-based reporting."
-msgstr ""
-"这一特性可以应用在很多场合,包括数据导入导出,通过 JMS 或 SOAP 具体化实体数据"
-"以及 基于 XSLT 的报表。"
+msgid "This feature has many applications including data import/export, externalization of entity data via JMS or SOAP and XSLT-based reporting."
+msgstr "这一特性可以应用在很多场合,包括数据导入导出,通过 JMS 或 SOAP 具体化实体数据以及 基于 XSLT 的报表。"
#. Tag: para
#, no-c-format
-msgid ""
-"A single mapping can be used to simultaneously map properties of a class and "
-"nodes of an XML document to the database, or, if there is no class to map, "
-"it can be used to map just the XML."
-msgstr ""
-"一个单一的映射就可以将类的属性和 XML 文档的节点同时映射到数据库。如果不需要映"
-"射类,它也可以用来只映射 XML 文档。 "
+msgid "A single mapping can be used to simultaneously map properties of a class and nodes of an XML document to the database, or, if there is no class to map, it can be used to map just the XML."
+msgstr "一个单一的映射就可以将类的属性和 XML 文档的节点同时映射到数据库。如果不需要映射类,它也可以用来只映射 XML 文档。 "
#. Tag: title
#, no-c-format
@@ -103,14 +71,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"This mapping allows you to access the data as a dom4j tree, or as a graph of "
-"property name/value pairs or java <literal>Map</literal>s. The property "
-"names are purely logical constructs that can be referred to in HQL queries."
-msgstr ""
-"这个映射使得你既可以把数据作为一棵 dom4j 树那样访问,又可以作为由属性键值对"
-"(java <literal>Map</literal>)组成的图那样访问。属性名字纯粹是逻辑上的结构,"
-"你可以在 HQL 查询中引用它。"
+msgid "This mapping allows you to access the data as a dom4j tree, or as a graph of property name/value pairs or java <literal>Map</literal>s. The property names are purely logical constructs that can be referred to in HQL queries."
+msgstr "这个映射使得你既可以把数据作为一棵 dom4j 树那样访问,又可以作为由属性键值对(java <literal>Map</literal>)组成的图那样访问。属性名字纯粹是逻辑上的结构,你可以在 HQL 查询中引用它。"
#. Tag: title
#, no-c-format
@@ -119,15 +81,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"A range of Hibernate mapping elements accept the <literal>node</literal> "
-"attribute. This lets you specify the name of an XML attribute or element "
-"that holds the property or entity data. The format of the <literal>node</"
-"literal> attribute must be one of the following:"
-msgstr ""
-"许多 Hibernate 映射元素具有 <literal>node</literal> 属性。这使你可以指定用来"
-"保存 属性或实体数据的 XML 属性或元素。<literal>node</literal> 属性必须是下列"
-"格式之一: "
+msgid "A range of Hibernate mapping elements accept the <literal>node</literal> attribute. This lets you specify the name of an XML attribute or element that holds the property or entity data. The format of the <literal>node</literal> attribute must be one of the following:"
+msgstr "许多 Hibernate 映射元素具有 <literal>node</literal> 属性。这使你可以指定用来保存 属性或实体数据的 XML 属性或元素。<literal>node</literal> 属性必须是下列格式之一: "
#. Tag: para
#, no-c-format
@@ -146,47 +101,23 @@
#. Tag: para
#, no-c-format
-msgid ""
-"<literal>\"element-name/@attribute-name\"</literal>: map to the named "
-"attribute of the named element"
-msgstr ""
-"<literal>\"element-name/@attribute-name\"</literal>:映射为指定元素的指定属性"
+msgid "<literal>\"element-name/@attribute-name\"</literal>: map to the named attribute of the named element"
+msgstr "<literal>\"element-name/@attribute-name\"</literal>:映射为指定元素的指定属性"
#. Tag: para
#, no-c-format
-msgid ""
-"For collections and single valued associations, there is an additional "
-"<literal>embed-xml</literal> attribute. If <literal>embed-xml=\"true\"</"
-"literal>, the default, the XML tree for the associated entity (or collection "
-"of value type) will be embedded directly in the XML tree for the entity that "
-"owns the association. Otherwise, if <literal>embed-xml=\"false\"</literal>, "
-"then only the referenced identifier value will appear in the XML for single "
-"point associations and collections will not appear at all."
-msgstr ""
-"对于集合和单值的关联,有一个额外的 <literal>embed-xml</literal> 属性可用。这"
-"个属性的缺省值是真(<literal>embed-xml=\"true\"</literal>)。如果 "
-"<literal>embed-xml=\"true\"</literal>,则对应于被关联实体或值类型的集合的XML"
-"树将直接嵌入拥有这些关联的实体的 XML 树中。否则,如果 <literal>embed-xml="
-"\"false\"</literal>,那么对于单值的关联,仅被引用的实体的标识符出现在 XML 树"
-"中(被引用实体本身不出现),而集合则根本不出现。"
+msgid "For collections and single valued associations, there is an additional <literal>embed-xml</literal> attribute. If <literal>embed-xml=\"true\"</literal>, the default, the XML tree for the associated entity (or collection of value type) will be embedded directly in the XML tree for the entity that owns the association. Otherwise, if <literal>embed-xml=\"false\"</literal>, then only the referenced identifier value will appear in the XML for single point associations and collections will not appear at all."
+msgstr "对于集合和单值的关联,有一个额外的 <literal>embed-xml</literal> 属性可用。这个属性的缺省值是真(<literal>embed-xml=\"true\"</literal>)。如果 <literal>embed-xml=\"true\"</literal>,则对应于被关联实体或值类型的集合的XML树将直接嵌入拥有这些关联的实体的 XML 树中。否则,如果 <literal>embed-xml=\"false\"</literal>,那么对于单值的关联,仅被引用的实体的标识符出现在 XML 树中(被引用实体本身不出现),而集合则根本不出现。"
#. Tag: para
#, no-c-format
-msgid ""
-"Do not leave <literal>embed-xml=\"true\"</literal> for too many "
-"associations, since XML does not deal well with circularity."
-msgstr ""
-"你应该小心,不要让太多关联的 embed-xml 属性为真(<literal>embed-xml=\"true"
-"\"</literal>),因为 XML 不能很好地处理循环引用。"
+msgid "Do not leave <literal>embed-xml=\"true\"</literal> for too many associations, since XML does not deal well with circularity."
+msgstr "你应该小心,不要让太多关联的 embed-xml 属性为真(<literal>embed-xml=\"true\"</literal>),因为 XML 不能很好地处理循环引用。"
#. Tag: para
#, no-c-format
-msgid ""
-"In this case, the collection of account ids is embedded, but not the actual "
-"account data. The following HQL query:"
-msgstr ""
-"在这个例子中,我们决定嵌入帐目号码(account id)的集合,但不嵌入实际的帐目数"
-"据。下面的 HQL 查询: "
+msgid "In this case, the collection of account ids is embedded, but not the actual account data. The following HQL query:"
+msgstr "在这个例子中,我们决定嵌入帐目号码(account id)的集合,但不嵌入实际的帐目数据。下面的 HQL 查询: "
#. Tag: para
#, no-c-format
@@ -195,12 +126,8 @@
#. Tag: para
#, no-c-format
-msgid ""
-"If you set <literal>embed-xml=\"true\"</literal> on the <literal><one-to-"
-"many></literal> mapping, the data might look more like this:"
-msgstr ""
-"如果你把一对多映射 <literal><one-to-many></literal> 的 embed-xml 属性置"
-"为真(<literal>embed-xml=\"true\"</literal>),则数据看上去就像这样:"
+msgid "If you set <literal>embed-xml=\"true\"</literal> on the <literal><one-to-many></literal> mapping, the data might look more like this:"
+msgstr "如果你把一对多映射 <literal><one-to-many></literal> 的 embed-xml 属性置为真(<literal>embed-xml=\"true\"</literal>),则数据看上去就像这样:"
#. Tag: title
#, no-c-format
@@ -209,21 +136,13 @@
#. Tag: para
#, no-c-format
-msgid ""
-"You can also re-read and update XML documents in the application. You can do "
-"this by obtaining a dom4j session:"
-msgstr ""
-"你也可以重新读入和更新应用程序中的 XML 文档。通过获取一个 dom4j 会话可以做到"
-"这一点:"
+msgid "You can also re-read and update XML documents in the application. You can do this by obtaining a dom4j session:"
+msgstr "你也可以重新读入和更新应用程序中的 XML 文档。通过获取一个 dom4j 会话可以做到这一点:"
#. Tag: para
#, no-c-format
-msgid ""
-"When implementing XML-based data import/export, it is useful to combine this "
-"feature with Hibernate's <literal>replicate()</literal> operation."
-msgstr ""
-"将这一特色与 Hibernate 的 <literal>replicate()</literal> 操作结合起来对于实现"
-"的基于 XML 的数据导入/导出将非常有用。"
+msgid "When implementing XML-based data import/export, it is useful to combine this feature with Hibernate's <literal>replicate()</literal> operation."
+msgstr "将这一特色与 Hibernate 的 <literal>replicate()</literal> 操作结合起来对于实现的基于 XML 的数据导入/导出将非常有用。"
#~ msgid ""
#~ "<![CDATA[<class name=\"Account\" \n"
@@ -267,7 +186,6 @@
#~ " ...\n"
#~ " \n"
#~ "</class>]]>"
-
#~ msgid ""
#~ "<![CDATA[<class entity-name=\"Account\" \n"
#~ " table=\"ACCOUNTS\" \n"
@@ -316,7 +234,6 @@
#~ " ...\n"
#~ " \n"
#~ "</class>]]>"
-
#~ msgid ""
#~ "<![CDATA[<class name=\"Customer\" \n"
#~ " table=\"CUSTOMER\" \n"
@@ -387,14 +304,12 @@
#~ " ...\n"
#~ " \n"
#~ "</class>]]>"
-
#~ msgid ""
#~ "<![CDATA[from Customer c left join fetch c.accounts where c.lastName "
#~ "like :lastName]]>"
#~ msgstr ""
#~ "<![CDATA[from Customer c left join fetch c.accounts where c.lastName "
#~ "like :lastName]]>"
-
#~ msgid ""
#~ "<![CDATA[<customer id=\"123456789\">\n"
#~ " <account short-desc=\"Savings\">987632567</account>\n"
@@ -417,7 +332,6 @@
#~ " </name>\n"
#~ " ...\n"
#~ "</customer>]]>"
-
#~ msgid ""
#~ "<![CDATA[<customer id=\"123456789\">\n"
#~ " <account id=\"987632567\" short-desc=\"Savings\">\n"
@@ -452,7 +366,6 @@
#~ " </name>\n"
#~ " ...\n"
#~ "</customer>]]>"
-
#~ msgid ""
#~ "<![CDATA[Document doc = ....;\n"
#~ " \n"
@@ -491,7 +404,6 @@
#~ "\n"
#~ "tx.commit();\n"
#~ "session.close();]]>"
-
#~ msgid ""
#~ "<![CDATA[Session session = factory.openSession();\n"
#~ "Session dom4jSession = session.getSession(EntityMode.DOM4J);\n"
@@ -526,3 +438,4 @@
#~ "\n"
#~ "tx.commit();\n"
#~ "session.close();]]>"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Conventions.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Conventions.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Conventions.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -5,210 +5,210 @@
msgstr ""
"Project-Id-Version: 0\n"
"POT-Creation-Date: 2010-02-04T04:51:23\n"
-"PO-Revision-Date: 2010-02-04T04:51:23\n"
-"Last-Translator: Automatically generated\n"
+"PO-Revision-Date: 2010-03-16 11:39+1000\n"
+"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: None\n"
"MIME-Version: 1.0\n"
-"Content-Type: application/x-publican; charset=UTF-8\n"
+"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#. Tag: title
#, no-c-format
msgid "Document Conventions"
-msgstr ""
+msgstr "文档规则"
#. Tag: para
#, no-c-format
msgid "This manual uses several conventions to highlight certain words and phrases and draw attention to specific pieces of information."
-msgstr ""
+msgstr "本手册使用了几种规则来突出某些文字和短语以及某些信息。"
#. Tag: para
#, no-c-format
msgid "In PDF and paper editions, this manual uses typefaces drawn from the <ulink url=\"https://fedorahosted.org/liberation-fonts/\">Liberation Fonts</ulink> set. The Liberation Fonts set is also used in HTML editions if the set is installed on your system. If not, alternative but equivalent typefaces are displayed. Note: Red Hat Enterprise Linux 5 and later includes the Liberation Fonts set by default."
-msgstr ""
+msgstr "在 PDF 版本里,本手册使用 <ulink url=\"https://fedorahosted.org/liberation-fonts/\">Liberation Fonts</ulink> 集里抽取的 typefaces。如果你安装了 Liberation Fonts 集,它也会用在 HTML 版本里。如果没有安装,则会使用其他相等的 typefaces。请注意,红帽企业版 Linux 5 和之后的版本缺省都包含了 Liberation Fonts 集。"
#. Tag: title
#, no-c-format
msgid "Typographic Conventions"
-msgstr ""
+msgstr "印刷规则"
#. Tag: para
#, no-c-format
msgid "Four typographic conventions are used to call attention to specific words and phrases. These conventions, and the circumstances they apply to, are as follows."
-msgstr ""
+msgstr "我们使用了四种印刷规则来突出特殊的文字和短语。下面是这些规则以及其适用的情况。"
#. Tag: para
#, no-c-format
msgid "<literal>Mono-spaced Bold</literal>"
-msgstr ""
+msgstr "<literal>Mono-spaced Bold</literal>"
#. Tag: para
#, no-c-format
msgid "Used to highlight system input, including shell commands, file names and paths. Also used to highlight keycaps and key combinations. For example:"
-msgstr ""
+msgstr "用来高亮显示系统输入,包括 shell 命令、文件名和路径。它也用于高亮显示键和键组合。例如:"
#. Tag: para
#, no-c-format
msgid "To see the contents of the file <filename>my_next_bestselling_novel</filename> in your current working directory, enter the <command>cat my_next_bestselling_novel</command> command at the shell prompt and press <keycap>Enter</keycap> to execute the command."
-msgstr ""
+msgstr "要查看当前工作目录里的 <filename>my_next_bestselling_novel</filename> 的内容,请在 shell 提示符下输入 <command>cat my_next_bestselling_novel</command> 并按<keycap>Enter</keycap> 来执行命令。"
#. Tag: para
#, no-c-format
msgid "The above includes a file name, a shell command and a keycap, all presented in mono-spaced bold and all distinguishable thanks to context."
-msgstr ""
+msgstr "上面包括了一个文件名、一个 shell 命令和键,它们都以 mono-spaced bold 出现并和上下文区分。"
#. Tag: para
#, no-c-format
msgid "Key combinations can be distinguished from keycaps by the hyphen connecting each part of a key combination. For example:"
-msgstr ""
+msgstr "键组合可以通过连字符和键来区分。例如:"
#. Tag: para
#, no-c-format
msgid "Press <keycap>Enter</keycap> to execute the command."
-msgstr ""
+msgstr "按 <keycap>Enter</keycap> 来执行这个命令。"
#. Tag: para
#, no-c-format
msgid "Press <keycombo><keycap>Ctrl</keycap><keycap>Alt</keycap><keycap>F1</keycap></keycombo> to switch to the first virtual terminal. Press <keycombo><keycap>Ctrl</keycap><keycap>Alt</keycap><keycap>F7</keycap></keycombo> to return to your X-Windows session."
-msgstr ""
+msgstr "按 <keycombo><keycap>Ctrl</keycap><keycap>Alt</keycap><keycap>F1</keycap></keycombo> 切换至第一个虚拟终端。按 <keycombo><keycap>Ctrl</keycap><keycap>Alt</keycap><keycap>F7</keycap></keycombo> 返回到 X-Windows 会话。"
#. Tag: para
#, no-c-format
msgid "The first paragraph highlights the particular keycap to press. The second highlights two key combinations (each a set of three keycaps with each set pressed simultaneously)."
-msgstr ""
+msgstr "第一段高亮显示了特殊的键。第二段高亮显示了两个键组合(每个都由三个同时按住的键构成)。"
#. Tag: para
#, no-c-format
msgid "If source code is discussed, class names, methods, functions, variable names and returned values mentioned within a paragraph will be presented as above, in <literal>mono-spaced bold</literal>. For example:"
-msgstr ""
+msgstr "在源码中提及的类名、方法、函数、变量和返回值都将以 <literal>mono-spaced bold</literal> 出现。例如:"
#. Tag: para
#, no-c-format
msgid "File-related classes include <classname>filesystem</classname> for file systems, <classname>file</classname> for files, and <classname>dir</classname> for directories. Each class has its own associated set of permissions."
-msgstr ""
+msgstr "文件相关的类包括用于文件系统的 <classname>filesystem</classname>、用于文件的 <classname>file</classname> 和用于目录的 <classname>dir</classname>。每个类都有自己的一套权限。"
#. Tag: para
#, no-c-format
msgid "<application>Proportional Bold</application>"
-msgstr ""
+msgstr "<application>Proportional Bold</application>"
#. Tag: para
#, no-c-format
msgid "This denotes words or phrases encountered on a system, including application names; dialog box text; labeled buttons; check-box and radio button labels; menu titles and sub-menu titles. For example:"
-msgstr ""
+msgstr "它表示系统里的文字或短语,包括程序名、对话框文本、标签按钮、复选框和单选框、菜单和子菜单标题。例如:"
#. Tag: para
#, no-c-format
msgid "Choose <menuchoice><guimenu>System</guimenu><guisubmenu>Preferences</guisubmenu><guimenuitem>Mouse</guimenuitem></menuchoice> from the main menu bar to launch <application>Mouse Preferences</application>. In the <guilabel>Buttons</guilabel> tab, click the <guilabel>Left-handed mouse</guilabel> check box and click <guibutton>Close</guibutton> to switch the primary mouse button from the left to the right (making the mouse suitable for use in the left hand)."
-msgstr ""
+msgstr "从主菜单选择 <menuchoice><guimenu>System</guimenu><guisubmenu>Preferences</guisubmenu><guimenuitem>Mouse</guimenuitem></menuchoice> 来启动 <application>Mouse Preferences</application>。在 <guilabel>Buttons</guilabel> 标签页里,点击 <guilabel>Left-handed mouse</guilabel> 复选框并点击 <guibutton>Close</guibutton> 来把鼠标主按钮从左切换到右(适合于习惯用左手的人)。"
#. Tag: para
#, no-c-format
msgid "To insert a special character into a <application>gedit</application> file, choose <menuchoice><guimenu>Applications</guimenu><guisubmenu>Accessories</guisubmenu><guimenuitem>Character Map</guimenuitem></menuchoice> from the main menu bar. Next, choose <menuchoice><guimenu>Search</guimenu><guimenuitem>Find…</guimenuitem></menuchoice> from the <application>Character Map</application> menu bar, type the name of the character in the <guilabel>Search</guilabel> field and click <guibutton>Next</guibutton>. The character you sought will be highlighted in the <guilabel>Character Table</guilabel>. Double-click this highlighted character to place it in the <guilabel>Text to copy</guilabel> field and then click the <guibutton>Copy</guibutton> button. Now switch back to your document and choose <menuchoice><guimenu>Edit</guimenu><guimenuitem>Paste</guimenuitem></menuchoice> from the <application>gedit</application> menu bar."
-msgstr ""
+msgstr "要在 <application>gedit</application> 文件里插入特殊字符,从主菜单里选择 <menuchoice><guimenu>Applications</guimenu><guisubmenu>Accessories</guisubmenu><guimenuitem>Character Map</guimenuitem></menuchoice>。然后,从 <application>Character Map</application> 菜单条里选择 <menuchoice><guimenu>Search</guimenu><guimenuitem>Find…</guimenuitem></menuchoice>,在 <guilabel>Search</guilabel>字段里输入字符并点击 <guibutton>Next</guibutton>。你要搜索的字符会在 <guilabel>Character Table</guilabel> 里高亮显示。双击这个高亮显示的字符并放入 <guilabel>Text to copy</guilabel> 字段里,然后点击 <guibutton>Copy</guibutton> 按钮。现在切换回你的文档并从 <application>gedit</application> 菜单条里选择<menuchoice><guimenu>Edit</guimenu><guimenuitem>Paste</guimenuitem></menuchoice>。"
#. Tag: para
#, no-c-format
msgid "The above text includes application names; system-wide menu names and items; application-specific menu names; and buttons and text found within a GUI interface, all presented in proportional bold and all distinguishable by context."
-msgstr ""
+msgstr "上面的内容包括了程序名称、系统级的菜单名和条目、程序专有的菜单名、图形界面里的按钮和文本,它们都以 proportional bold 出现并和其他内容区分。"
#. Tag: para
#, no-c-format
msgid "<command><replaceable>Mono-spaced Bold Italic</replaceable></command> or <application><replaceable>Proportional Bold Italic</replaceable></application>"
-msgstr ""
+msgstr "<command><replaceable>Mono-spaced Bold Italic</replaceable></command> 或 <application><replaceable>Proportional Bold Italic</replaceable></application>"
#. Tag: para
#, no-c-format
msgid "Whether mono-spaced bold or proportional bold, the addition of italics indicates replaceable or variable text. Italics denotes text you do not input literally or displayed text that changes depending on circumstance. For example:"
-msgstr ""
+msgstr "无论是 mono-spaced bold 还是 proportional bold,斜体都表示可替换的或可变的文本。斜体表示非输入的文本,它根据周围的内容而变化。例如:"
#. Tag: para
#, no-c-format
msgid "To connect to a remote machine using ssh, type <command>ssh <replaceable>username</replaceable>@<replaceable>domain.name</replaceable></command> at a shell prompt. If the remote machine is <filename>example.com</filename> and your username on that machine is john, type <command>ssh john(a)example.com</command>."
-msgstr ""
+msgstr "要用 ssh 连接到远程机器,在 shell 提示下输入 <command>ssh <replaceable>username</replaceable>@<replaceable>domain.name</replaceable></command>。如果这个远程主机是 <filename>example.com</filename> 且你的用户名是 john,请输入 <command>ssh john(a)example.com</command>。"
#. Tag: para
#, no-c-format
msgid "The <command>mount -o remount <replaceable>file-system</replaceable></command> command remounts the named file system. For example, to remount the <filename>/home</filename> file system, the command is <command>mount -o remount /home</command>."
-msgstr ""
+msgstr "<command>mount -o remount <replaceable>file-system</replaceable></command> 命令重新挂载文件系统。例如,要重新挂载 <filename>/home</filename>,命令是 <command>mount -o remount /home</command>。"
#. Tag: para
#, no-c-format
msgid "To see the version of a currently installed package, use the <command>rpm -q <replaceable>package</replaceable></command> command. It will return a result as follows: <command><replaceable>package-version-release</replaceable></command>."
-msgstr ""
+msgstr "要查看当前安装的软件包的版本,你可以使用 <command>rpm -q <replaceable>package</replaceable></command> 命令。它将返回这样的结果集:<command><replaceable>package-version-release</replaceable></command>。"
#. Tag: para
#, no-c-format
msgid "Note the words in bold italics above — username, domain.name, file-system, package, version and release. Each word is a placeholder, either for text you enter when issuing a command or for text displayed by the system."
-msgstr ""
+msgstr "请注意上面字体为粗斜体的文字 — username, domain.name, file-system, package, version 和 release。无论是你输入的命令还是系统显示的文本,每个文字都是一个占位符。"
#. Tag: para
#, no-c-format
msgid "Aside from standard usage for presenting the title of a work, italics denotes the first use of a new and important term. For example:"
-msgstr ""
+msgstr "除了表示标题的标准用法之外,斜体表示新的和重要的术语的第一次使用。例如:"
#. Tag: para
#, no-c-format
msgid "Publican is a <firstterm>DocBook</firstterm> publishing system."
-msgstr ""
+msgstr "Publican 是一个基于 <firstterm>DocBook</firstterm> 的发布系统。"
#. Tag: title
#, no-c-format
msgid "Pull-quote Conventions"
-msgstr ""
+msgstr "副标题规则"
#. Tag: para
#, no-c-format
msgid "Terminal output and source code listings are set off visually from the surrounding text."
-msgstr ""
+msgstr "终端输出和源代码列表由周围的文本来烘托。"
#. Tag: para
#, no-c-format
msgid "Output sent to a terminal is set in <computeroutput>mono-spaced roman</computeroutput> and presented thus:"
-msgstr ""
+msgstr "发送到终端的输出在 <computeroutput>mono-spaced roman</computeroutput> 设置且以这种形式出现:"
#. Tag: para
#, no-c-format
msgid "Source-code listings are also set in <computeroutput>mono-spaced roman</computeroutput> but add syntax highlighting as follows:"
-msgstr ""
+msgstr "源码列表也在 <computeroutput>mono-spaced roman</computeroutput> 里设置但添加了如下的语法高亮显示:"
#. Tag: title
#, no-c-format
msgid "Notes and Warnings"
-msgstr ""
+msgstr "注意和警告"
#. Tag: para
#, no-c-format
msgid "Finally, we use three visual styles to draw attention to information that might otherwise be overlooked."
-msgstr ""
+msgstr "最后,我们使用三章可视风格来引起对可能被忽视的信息的注意。"
#. Tag: title
#, no-c-format
msgid "Note"
-msgstr ""
+msgstr "注意"
#. Tag: para
#, no-c-format
msgid "Notes are tips, shortcuts or alternative approaches to the task at hand. Ignoring a note should have no negative consequences, but you might miss out on a trick that makes your life easier."
-msgstr ""
+msgstr "注意是完成手中任务的提示、快捷方式或其他方法。忽略”注意“不会产生负面影响,但你可能会错失使你受益的技巧。"
#. Tag: title
#, no-c-format
msgid "Important"
-msgstr ""
+msgstr "重要信息"
#. Tag: para
#, no-c-format
msgid "Important boxes detail things that are easily missed: configuration changes that only apply to the current session, or services that need restarting before an update will apply. Ignoring a box labeled 'Important' won't cause data loss but may cause irritation and frustration."
-msgstr ""
+msgstr "重要信息显示容易被错过的信息:只适用于当前会话的配置改动、或需要重启来生效的服务。忽略标记为”重要信息“的对话框不会导致数据丢失但会引发问题。"
#. Tag: title
#, no-c-format
msgid "Warning"
-msgstr ""
+msgstr "警告"
#. Tag: para
#, no-c-format
msgid "Warnings should not be ignored. Ignoring warnings will most likely cause data loss."
-msgstr ""
+msgstr "警告不应该被忽略。忽略警告将导致数据丢失。"
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Feedback.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Feedback.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Feedback.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -5,19 +5,20 @@
msgstr ""
"Project-Id-Version: 0\n"
"POT-Creation-Date: 2010-03-12T00:03:48\n"
-"PO-Revision-Date: 2010-02-04T04:51:23\n"
-"Last-Translator: Automatically generated\n"
+"PO-Revision-Date: 2010-03-16 10:17+1000\n"
+"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: None\n"
"MIME-Version: 1.0\n"
-"Content-Type: application/x-publican; charset=UTF-8\n"
+"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#. Tag: title
#, no-c-format
msgid "We Need Feedback!"
-msgstr ""
+msgstr "我们需要你的反馈!"
#. Tag: para
#, no-c-format
msgid "You should over ride this by creating your own local Feedback.xml file."
-msgstr ""
+msgstr "用自己的 Feedback.xml 文件覆盖本段内容。"
+
Modified: core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Legal_Notice.po
===================================================================
--- core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Legal_Notice.po 2010-03-16 01:28:07 UTC (rev 19002)
+++ core/trunk/documentation/manual/src/main/docbook/zh-CN/fallback_content/Legal_Notice.po 2010-03-16 01:42:26 UTC (rev 19003)
@@ -5,15 +5,15 @@
msgstr ""
"Project-Id-Version: 0\n"
"POT-Creation-Date: 2010-02-04T04:51:23\n"
-"PO-Revision-Date: 2010-02-04T04:51:23\n"
-"Last-Translator: Automatically generated\n"
+"PO-Revision-Date: 2010-03-16 10:18+1000\n"
+"Last-Translator: Xi HUANG <xhuang(a)redhat.com>\n"
"Language-Team: None\n"
"MIME-Version: 1.0\n"
-"Content-Type: application/x-publican; charset=UTF-8\n"
+"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#. Tag: para
#, no-c-format
msgid "Copyright <trademark class=\"copyright\"></trademark> &YEAR; &HOLDER; This material may only be distributed subject to the terms and conditions set forth in the GNU Free Documentation License (GFDL), V1.2 or later (the latest version is presently available at <ulink url=\"http://www.gnu.org/licenses/fdl.txt\">http://www.gnu.org/licenses/fdl.txt</ulink>)."
-msgstr ""
+msgstr "Copyright <trademark class=\"copyright\"></trademark> &YEAR; &HOLDER; 本文档只可以按照 GNU Free Documentation License (GFDL), V1.2 或更新协议进行分发(GNU 的最新版本请参考 <ulink url=\"http://www.gnu.org/licenses/fdl.txt\">http://www.gnu.org/licenses/fdl.txt</ulink>)。"
15 years, 8 months
Hibernate SVN: r19002 - in search/trunk: hibernate-search and 103 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2010-03-15 21:28:07 -0400 (Mon, 15 Mar 2010)
New Revision: 19002
Added:
search/trunk/hibernate-search/
search/trunk/hibernate-search/pom.xml
search/trunk/hibernate-search/src/
search/trunk/hibernate-search/src/main/
search/trunk/hibernate-search/src/main/assembly/
search/trunk/hibernate-search/src/main/assembly/dist.xml
search/trunk/hibernate-search/src/main/docbook/
search/trunk/hibernate-search/src/main/docbook/en-US/
search/trunk/hibernate-search/src/main/docbook/en-US/images/
search/trunk/hibernate-search/src/main/docbook/en-US/images/hibernate_logo_a.png
search/trunk/hibernate-search/src/main/docbook/en-US/images/jms-backend.png
search/trunk/hibernate-search/src/main/docbook/en-US/images/lucene-backend.png
search/trunk/hibernate-search/src/main/docbook/en-US/master.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/
search/trunk/hibernate-search/src/main/docbook/en-US/modules/architecture.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/batchindex.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/configuration.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/getting-started.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/lucene-native.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/mapping.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/optimize.xml
search/trunk/hibernate-search/src/main/docbook/en-US/modules/query.xml
search/trunk/hibernate-search/src/main/docbook/pot/
search/trunk/hibernate-search/src/main/docbook/pot/master.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/
search/trunk/hibernate-search/src/main/docbook/pot/modules/architecture.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/batchindex.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/configuration.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/getting-started.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/lucene-native.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/mapping.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/optimize.pot
search/trunk/hibernate-search/src/main/docbook/pot/modules/query.pot
search/trunk/hibernate-search/src/main/docbook/zh-CN/
search/trunk/hibernate-search/src/main/docbook/zh-CN/master.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/architecture.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/batchindex.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/configuration.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/getting-started.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/lucene-native.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/mapping.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/optimize.po
search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/query.po
search/trunk/hibernate-search/src/main/java/
search/trunk/hibernate-search/src/main/java/org/
search/trunk/hibernate-search/src/main/java/org/hibernate/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/Environment.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextFilter.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextQuery.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextSession.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/MassIndexer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/ProjectionConstants.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/Search.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchException.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/Version.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/Discriminator.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Analyzer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDef.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDefs.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDiscriminator.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Boost.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/CalendarBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridges.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ContainedIn.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DateBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DocumentId.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DynamicBoost.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Factory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Field.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FieldBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Fields.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FilterCacheModeType.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDef.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDefs.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Index.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Indexed.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/IndexedEmbedded.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Key.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Parameter.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ProvidedId.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Resolution.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Similarity.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Store.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TermVector.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenFilterDef.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenizerDef.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/AddLuceneWork.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/BackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/DeleteLuceneWork.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneIndexingParameters.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneWork.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/OptimizeLuceneWork.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/PurgeAllLuceneWork.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/QueueingProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/TransactionContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Work.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkQueue.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkType.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkVisitor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Worker.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkerFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Workspace.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/ConfigurationParseHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/MaskedProperty.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/EventSourceTransactionContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/TransactionalWorker.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/BatchBackend.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/DirectoryProviderWorkspace.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/LuceneBatchBackend.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/BlackHoleBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsMasterMessageListener.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/MasterJGroupsBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/SlaveJGroupsBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/AbstractJMSHibernateSearchController.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/CloseIndexRunnable.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionVisitor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessorFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPResources.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDirectoryWorkProcessor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/QueueProcessors.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkVisitor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchCoordinator.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchIndexingWorkspace.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/EntityConsumerLuceneworkProducer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/Executors.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierConsumerEntityProducer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierProducer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/MassIndexerProgressMonitor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/ProducerConsumerQueue.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/BridgeFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/FieldBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/LuceneOptions.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/ParameterizedBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/StringBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayFieldBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayStringBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BooleanBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CalendarBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CharacterBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ClassBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateResolutionUtil.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DoubleBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/EnumBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/FloatBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/IntegerBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/LongBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/NumberBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ShortBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/StringBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UriBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UrlBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/AnalyzerDefMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/CalendarBridgeMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ClassBridgeMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ConcatStringBridge.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ContainedInMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DateBridgeMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DocumentIdMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityDescriptor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldBridgeMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FullTextFilterDefMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexEmbeddedMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexedMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyDescriptor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ProvidedIdMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfiguration.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/TokenFilterDefMapping.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/BoostStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DefaultBoostStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentExtractor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityInfo.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityState.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/FilterDef.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/Loader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LoaderHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LuceneOptionsImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/MultiClassesQueryLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoaderHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ProjectionLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/QueryLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/SearchFactoryImplementor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/ContextHolder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/EventListenerRegister.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexEventListener.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/AndDocIdSet.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/CachingWrapperFilter.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ChainedFilter.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterCachingStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterKey.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterOptimizationHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FullTextFilterImplementor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ShardSensitiveOnlyFilter.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/StandardFilterKey.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/FullTextSessionImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/InitContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MappingModelMetadataProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MassIndexerImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchFactoryImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchMappingBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SimpleIndexingProgressMonitor.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SolrAnalyzerBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextEntityManager.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextQuery.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/Search.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextFilterImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextQueryImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/IteratorImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/QueryHits.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/ScrollableResultsImpl.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/AbstractTermQueryBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BooleanContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BuildableTermQueryBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/NegatableBooleanContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/SealedQueryBuilder.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/TermQueryBuilderDataStore.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnField.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnSearch.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/CacheableMultiReader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/NotSharedReaderProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharedReaderProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharingBufferReaderProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextManager.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextQuery.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/InstanceTransactionContext.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/LuceneFullTextManager.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSDirectoryProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSMasterDirectoryProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IdHashShardingStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IndexShardingStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/LockFactoryFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/NotShardedStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/RAMDirectoryProvider.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/IncrementalOptimizerStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/NoOpOptimizerStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/OptimizerStrategy.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ContextHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/DelegateNamedAnalyzer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FileHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FilterCacheModeTypeHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/HibernateSearchResourceLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/LoggerFactory.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/PluginLoader.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ReflectionHelper.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ScopedAnalyzer.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/WeakIdentityHashMap.java
search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/XMLHelper.java
search/trunk/hibernate-search/src/main/javadoc/
search/trunk/hibernate-search/src/main/javadoc/jdstyle.css
search/trunk/hibernate-search/src/main/javadoc/package.html
search/trunk/hibernate-search/src/main/resources/
search/trunk/hibernate-search/src/test/
search/trunk/hibernate-search/src/test/java/
search/trunk/hibernate-search/src/test/java/org/
search/trunk/hibernate-search/src/test/java/org/hibernate/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/AlternateDocument.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Clock.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Document.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/FSDirectoryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/PurgeTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/RamDirectoryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SearchTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SerializationTestHelper.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TransactionTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AlarmEntity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AnalyzerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Article.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/BlogEntry.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/DoubleAnalyzerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/LanguageDiscriminator.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyComponent.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyEntity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test1Analyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test2Analyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test3Analyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test4Analyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/AnalyzerInheritanceTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/BaseClass.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/ISOLatin1Analyzer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/SubClass.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilter.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilterFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/SolrAnalyzerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/Team.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/AncientBook.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Book.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Dvd.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/IndexingGeneratedCorpusTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/ModernBook.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/SearchIndexerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/TitleAble.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/BridgeTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatDeptsFieldsClassBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatFieldsClassBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeAndProjectionTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Cloud.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CloudType.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/DateSplitBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Department.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Departments.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/EquipmentType.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Gangster.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Student.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/StudentsSizeBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Teacher.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateFieldBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateStringBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/Animal.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/NoAnnotationsTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Address.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/BlogEntry.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CatDeptsFieldsClassBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationParseHelperTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Country.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBackendTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBoostStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomFieldBoostStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Departments.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/DynamicBoostedDescLibrary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EquipmentType.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EventListenerRegisterTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Item.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/MaskedPropertiesTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProductCatalog.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticMappingTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticSearchMappingFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProvidedIdEntry.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/SecurityFilterFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/UselessShardingStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/User.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockFactoryFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockProviderTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/DirectoryProviderHelperTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/SnowStorm.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Address.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Author.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Country.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/EmbeddedTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/NonIndexedEntity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Order.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Owner.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Person.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Product.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/State.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/StateCandidate.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Tower.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Address.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/BusinessContact.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Contact.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/DoubleInsertEmbeddedTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/PersonalContact.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Phone.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Address.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Attribute.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/AttributeValue.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/NestedEmbeddedTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Person.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Place.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Product.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusLine.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusStop.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/EventListenerSerializationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/LazyCollectionsUpdatingTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/RollbackTransactionTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/Document.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/AndDocIdSetsTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/BestDriversFilter.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Driver.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilter.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilterFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FilterTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FiltersOptimizationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/SecurityFilterFactory.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Soap.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Animal.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/EmbeddedIdTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/ImplicitIdTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Person.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPK.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPKBridge.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ManualTransactionContext.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPerson.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPersonSub.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/StandaloneConf.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/ManualIndexingStrategyTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Animal.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Being.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Bird.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Eagle.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Fish.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/InheritanceTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Mammal.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/JGroupsCommonTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/MultipleSessionsSearchTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/JGroupsMasterTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/TShirt.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsReceiver.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsSlaveTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/TShirt.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/JMSMasterTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/MDBSearchController.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/TShirt.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/JMSSlaveTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/SearchQueueChecker.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/TShirt.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/Bretzel.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerSerializationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/JPATestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Construction.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/IncrementalOptimizerStrategyPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/OptimizerPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Worker.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/Boat.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/IndexTestDontRun.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/SearcherThread.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/AlternateBook.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Author.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Book.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Clock.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ElectricalProperties.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Employee.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQuerySortTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQueryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/MultiClassesQueryLoaderTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Music.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Person.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionQueryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToDelimStringResultTransformer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToMapResultTransformer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryLoaderTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryUnindexedEntityTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ScrollableResultsTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/TermVectorTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedDescriptionLibrary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedFieldDescriptionLibrary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedGetDescriptionLibrary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomBoostStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomFieldBoostStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostedDescriptionLibrary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostingTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/FieldBoostTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/Library.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/AbstractCar.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/Bike.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/CombiCar.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/MixedCriteriaTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/SportCar.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/dsl/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/Dvd.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/ExplanationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Detective.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/ReaderPerfTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedBufferedReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Suspect.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/FilterOnDirectoryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/SharingBufferIndexProviderTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/TestableSharingBufferReaderProvider.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/AbstractActivity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/BufferSharingReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/IndexFillRunnable.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/InsertActivity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/NotSharedReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/ReaderPerformance.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SearchActivity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SharedReaderPerfTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/UpdateActivity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Categorie.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/DelegationWrapper.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Domain.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Email.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Entite.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexUsingManualFlushTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/OptimizeTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/SessionTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Animal.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategyTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/DirectoryProviderForQueryTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Email.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Furniture.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/IdShardingStrategyTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/ShardsTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/SpecificShardingStrategy.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Can.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity2.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/IllegalSimilarityConfigurationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/LittleTrash.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/ProperTrashExtension.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SimilarityTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Sink.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SmallerCan.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Trash.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/AnalyzerUtils.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FileHelperTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FullTextSessionBuilder.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/PluginLoaderTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/SentenceInventor.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/TextProductionTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/WordDictionary.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/AsyncWorkerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/ConcurrencyTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Drink.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employee.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employer.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Food.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/SyncWorkerTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/WorkerTestCase.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/EmailAddress.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/Person.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/SpecialPerson.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java
search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkSequencesTest.java
search/trunk/hibernate-search/src/test/resources/
search/trunk/hibernate-search/src/test/resources/hibernate.properties
search/trunk/hibernate-search/src/test/resources/jndi.properties
search/trunk/hibernate-search/src/test/resources/log4j.properties
search/trunk/hibernate-search/src/test/resources/org/
search/trunk/hibernate-search/src/test/resources/org/hibernate/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/stoplist.properties
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/synonyms.properties
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/
search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/Animal.hbm.xml
Removed:
search/trunk/hibernate-search-archetype/src/main/archetype/
search/trunk/src/
Modified:
search/trunk/hibernate-search-archetype/pom.xml
search/trunk/pom.xml
Log:
HSEARCH-468 First step. Pushed main code into hibernate-search
Property changes on: search/trunk/hibernate-search
___________________________________________________________________
Name: svn:ignore
+ target
Added: search/trunk/hibernate-search/pom.xml
===================================================================
--- search/trunk/hibernate-search/pom.xml (rev 0)
+++ search/trunk/hibernate-search/pom.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,362 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <artifactId>hibernate-search-parent</artifactId>
+ <groupId>org.hibernate</groupId>
+ <version>3.2.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hibernate-search</artifactId>
+
+ <name>Hibernate Search</name>
+ <description>Hibernate Search</description>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-commons-annotations</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate.java-persistence</groupId>
+ <artifactId>jpa-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.transaction</groupId>
+ <artifactId>jta</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.activemq</groupId>
+ <artifactId>activemq-core</artifactId>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <defaultGoal>test</defaultGoal>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <systemProperties>
+ <property>
+ <name>build.dir</name>
+ <value>${basedir}/target</value>
+ </property>
+ <!--
+ Following is the default jgroups mcast address. If you find the testsuite runs very slowly,
+ there may be problems with multicast on the interface JGroups uses by default on
+ your machine. You can try to resolve this by setting 'jgroups.bind_addr' as a system property
+ on the JVM launching Maven, with the value set to an interface where you know multicast works.
+ -->
+ <property>
+ <name>jgroups.bind_addr</name>
+ <value>127.0.0.1</value>
+ </property>
+ <!-- There are problems with multicast and IPv6 on some OS/JDK combos, so we tell Java
+ to use IPv4. If you have problems with multicast when running the tests you can
+ try setting this to 'false', although typically that won't be helpful.
+ -->
+ <property>
+ <name>java.net.preferIPv4Stack</name>
+ <value>true</value>
+ </property>
+ </systemProperties>
+ <excludes>
+ <exclude>**/*.java</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.maven.plugins</groupId>
+ <artifactId>maven-jdocbook-plugin</artifactId>
+ <configuration>
+ <sourceDocumentName>master.xml</sourceDocumentName>
+ <sourceDirectory>${basedir}/src/main/docbook</sourceDirectory>
+ <masterTranslation>en-US</masterTranslation>
+ <translations>
+ <translation>zh-CN</translation>
+ </translations>
+ <imageResource>
+ <directory>${basedir}/src/main/docbook/en-US/images</directory>
+ </imageResource>
+ <formats>
+ <format>
+ <formatName>pdf</formatName>
+ <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/pdf.xsl</stylesheetResource>
+ <finalName>hibernate_reference.pdf</finalName>
+ </format>
+ <format>
+ <formatName>html_single</formatName>
+ <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/xhtml-single.xsl
+ </stylesheetResource>
+ <finalName>index.html</finalName>
+ </format>
+ <format>
+ <formatName>html</formatName>
+ <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/xhtml.xsl
+ </stylesheetResource>
+ <finalName>index.html</finalName>
+ </format>
+ </formats>
+ <options>
+ <xincludeSupported>true</xincludeSupported>
+ <xmlTransformerType>saxon</xmlTransformerType>
+ <!-- needed for uri-resolvers; can be omitted if using 'current' uri scheme -->
+ <!-- could also locate the docbook dependency and inspect its version... -->
+ <docbookVersion>1.72.0</docbookVersion>
+ <localeSeparator>-</localeSeparator>
+ </options>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <configuration>
+ <stylesheetfile>${basedir}/src/main/javadoc/jdstyle.css</stylesheetfile>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assembly/dist.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-release-plugin</artifactId>
+ <configuration>
+ <releaseProfiles>release</releaseProfiles>
+ <goals>package javadoc:javadoc org.jboss.maven.plugins:maven-jdocbook-plugin:2.2.0:resources
+ org.jboss.maven.plugins:maven-jdocbook-plugin:2.2.0:generate assembly:assembly
+ </goals>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.maven.plugins</groupId>
+ <artifactId>maven-injection-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>compile</phase>
+ <goals>
+ <goal>bytecode</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <bytecodeInjections>
+ <bytecodeInjection>
+ <expression>${pom.version}</expression>
+ <targetMembers>
+ <methodBodyReturn>
+ <className>org.hibernate.search.Version</className>
+ <methodName>getVersionString</methodName>
+ </methodBodyReturn>
+ </targetMembers>
+ </bytecodeInjection>
+ </bytecodeInjections>
+ </configuration>
+ </plugin>
+ </plugins>
+ <testResources>
+ <testResource>
+ <filtering>true</filtering>
+ <directory>src/test/resources</directory>
+ <includes>
+ <include>**/*.properties</include>
+ <include>**/*.xml</include>
+ </includes>
+ </testResource>
+ </testResources>
+ </build>
+
+ <profiles>
+ <!-- ================================ -->
+ <!-- Dependency profiles to test w and -->
+ <!-- w/o optional dependencies -->
+ <!-- =============================== -->
+ <profile>
+ <id>with-optional-jars</id>
+ <activation>
+ <activeByDefault>true</activeByDefault>
+ </activation>
+ <dependencies>
+ <!-- =============================== -->
+ <!-- Optional Dependencies -->
+ <!-- =============================== -->
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-annotations</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-entitymanager</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-common</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-core</artifactId>
+ <optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-solrj</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>woodstox</groupId>
+ <artifactId>wstx-asl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>net.java.dev.stax-utils</groupId>
+ <artifactId>stax-utils</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-lucene-core</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-snowball</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analyzers</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-codec</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-io</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>javax.jms</groupId>
+ <artifactId>jms</artifactId>
+ <scope>provided</scope>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>jgroups</groupId>
+ <artifactId>jgroups</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>javax.annotation</groupId>
+ <artifactId>jsr250-api</artifactId>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <!--argLine>-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005</argLine-->
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <excludes>
+ <exclude>**/classloading/*.java</exclude>
+ <exclude>**/*PerfTest.java</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ <profile>
+ <id>without-optional-jars</id>
+ <dependencies>
+ <dependency>
+ <groupId>javassist</groupId>
+ <artifactId>javassist</artifactId>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <!--argLine>-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005</argLine-->
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <excludes>
+ <exclude>none</exclude>
+ <exclude>**/*PerfTest.java</exclude>
+ </excludes>
+ <includes>
+ <include>**/classloading/*Test.java</include>
+ </includes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+</project>
Property changes on: search/trunk/hibernate-search/src/main/assembly
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/main/assembly/dist.xml
===================================================================
--- search/trunk/hibernate-search/src/main/assembly/dist.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/assembly/dist.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<assembly>
+ <id>dist</id>
+ <formats>
+ <format>tar.gz</format>
+ <format>tar.bz2</format>
+ <format>zip</format>
+ </formats>
+
+ <dependencySets>
+ <dependencySet>
+ <useProjectArtifact>false</useProjectArtifact>
+ <outputDirectory>/dist/lib</outputDirectory>
+ <scope>runtime</scope>
+ </dependencySet>
+ </dependencySets>
+
+ <fileSets>
+ <fileSet>
+ <directory>target</directory>
+ <outputDirectory>/dist</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>target/site/apidocs</directory>
+ <outputDirectory>/dist/docs/api</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>target/docbook/publish/en-US</directory>
+ <outputDirectory>/dist/docs/manual</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>.</directory>
+ <outputDirectory></outputDirectory>
+ <useDefaultExcludes>true</useDefaultExcludes>
+ <excludes>
+ <exclude>target/**</exclude>
+ <exclude>hibernate-search-archetype/**</exclude>
+ </excludes>
+ </fileSet>
+ </fileSets>
+</assembly>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/images
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/main/docbook/en-US/images/hibernate_logo_a.png
===================================================================
(Binary files differ)
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/images/hibernate_logo_a.png
___________________________________________________________________
Name: svn:mime-type
+ application/octet-stream
Added: search/trunk/hibernate-search/src/main/docbook/en-US/images/jms-backend.png
===================================================================
(Binary files differ)
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/images/jms-backend.png
___________________________________________________________________
Name: svn:mime-type
+ application/octet-stream
Added: search/trunk/hibernate-search/src/main/docbook/en-US/images/lucene-backend.png
===================================================================
(Binary files differ)
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/images/lucene-backend.png
___________________________________________________________________
Name: svn:mime-type
+ application/octet-stream
Added: search/trunk/hibernate-search/src/main/docbook/en-US/master.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/master.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/master.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,98 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id$ -->
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd" [
+<!ENTITY version "WORKING">
+<!ENTITY copyrightYear "2004">
+<!ENTITY copyrightHolder "Red Hat Middleware, LLC.">
+]>
+<book lang="en">
+ <bookinfo>
+ <title>Hibernate Search</title>
+
+ <subtitle>Apache <trademark>Lucene</trademark> Integration</subtitle>
+
+ <subtitle>Reference Guide</subtitle>
+
+ <releaseinfo>&version;</releaseinfo>
+
+
+ <mediaobject>
+ <imageobject role="fo">
+ <imagedata align="center" fileref="hibernate_logo_a.png" />
+ </imageobject>
+
+ <imageobject role="html">
+ <imagedata depth="3cm" fileref="hibernate_logo_a.png" />
+ </imageobject>
+ </mediaobject>
+
+ </bookinfo>
+
+ <toc></toc>
+
+ <preface id="preface" revision="2">
+ <title>Preface</title>
+
+ <para>Full text search engines like Apache Lucene are very powerful
+ technologies to add efficient free text search capabilities to
+ applications. However, Lucene suffers several mismatches when dealing with
+ object domain model. Amongst other things indexes have to be kept up to
+ date and mismatches between index structure and domain model as well as
+ query mismatches have to be avoided.</para>
+
+ <para>Hibernate Search addresses these shortcomings - it indexes your
+ domain model with the help of a few annotations, takes care of
+ database/index synchronization and brings back regular managed objects
+ from free text queries. To achieve this, Hibernate Search combines the
+ power of <ulink url="http://www.hibernate.org">Hibernate</ulink> and
+ <ulink url="http://lucene.apache.org">Apache Lucene</ulink>.</para>
+ </preface>
+
+ <xi:include href="modules/getting-started.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/architecture.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/configuration.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/mapping.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/query.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/batchindex.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/optimize.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+ <xi:include href="modules/lucene-native.xml"
+ xmlns:xi="http://www.w3.org/2001/XInclude" />
+</book>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/master.xml
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Name: svn:eol-style
+ native
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/architecture.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/architecture.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/architecture.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,274 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-architecture">
+ <!-- $Id$ -->
+
+ <title>Architecture</title>
+
+ <section>
+ <title>Overview</title>
+
+ <para>Hibernate Search consists of an indexing component and an index
+ search component. Both are backed by Apache Lucene.</para>
+
+ <para>Each time an entity is inserted, updated or removed in/from the
+ database, Hibernate Search keeps track of this event (through the
+ Hibernate event system) and schedules an index update. All the index
+ updates are handled without you having to use the Apache Lucene APIs (see
+ <xref linkend="search-configuration-event" />).</para>
+
+ <para>To interact with Apache Lucene indexes, Hibernate Search has the
+ notion of <classname>DirectoryProvider</classname>s. A directory provider
+ will manage a given Lucene <classname>Directory</classname> type. You can
+ configure directory providers to adjust the directory target (see <xref
+ linkend="search-configuration-directory" />).</para>
+
+ <para>Hibernate Search uses the Lucene index to search an entity and
+ return a list of managed entities saving you the tedious object to Lucene
+ document mapping. The same persistence context is shared between Hibernate
+ and Hibernate Search. As a matter of fact, the
+ <classname>FullTextSession</classname> is built on top of the Hibernate
+ Session, so that the application code can use the unified
+ <classname>org.hibernate.Query</classname> or
+ <classname>javax.persistence.Query</classname> APIs exactly the way HQL,
+ JPA-QL or native queries would.</para>
+
+ <para>To be more efficient, Hibernate Search batches the write
+ interactions with the Lucene index. There are currently two types of
+ batching depending on the expected scope. Outside a transaction, the index
+ update operation is executed right after the actual database operation.
+ This scope is really a no-scoping setup and no batching is performed.
+ However, it is recommended - for both your database and Hibernate Search -
+ to execute your operations in a transaction, be it JDBC or JTA. When in a
+ transaction, the index update operation is scheduled for the transaction
+ commit phase and discarded in case of transaction rollback. The batching
+ scope is the transaction. There are two immediate benefits:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Performance: Lucene indexing works better when operations are
+ executed in batches.</para>
+ </listitem>
+
+ <listitem>
+ <para>ACIDity: The work executed has the same scoping as the one
+ executed by the database transaction and is executed if and only if
+ the transaction is committed. This is not ACID in the strict sense,
+ but ACID behavior is rarely useful for full text search indexes
+ since they can be rebuilt from the source at any time.</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>You can think of those two scopes (no scope vs transactional) as the
+ equivalent of the (infamous) autocommit vs transactional behavior. From a
+ performance perspective, the <emphasis>in transaction</emphasis> mode is
+ recommended. The scoping choice is made transparently. Hibernate Search
+ detects the presence of a transaction and adjusts the scoping.</para>
+
+ <note>
+ Hibernate Search works perfectly fine in the Hibernate / EntityManager long conversation pattern aka. atomic conversation.
+ </note>
+
+ <note>
+ Depending on user demand, additional scoping will be considered, the pluggability mechanism being already in place.
+ </note>
+ </section>
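As a minimal illustration of the transactional scope described above - a sketch only, assuming a SessionFactory is available and a hypothetical Customer entity is mapped and @Indexed - a regular Hibernate transaction is all that is needed; the scheduled index update is applied at commit:

Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
Customer customer = (Customer) session.get( Customer.class, 8 );
customer.setName( "ACME" );  // plain entity change, no Lucene API involved
tx.commit();                 // the index update is executed at commit time
session.close();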
+
+ <section>
+ <title>Back end</title>
+
+ <para>Hibernate Search offers the ability to let the scoped work be
+ processed by different back ends. Two back ends are provided out of the
+ box for two different scenarios.</para>
+
+ <section>
+ <title>Back end types</title>
+
+ <section>
+ <title>Lucene</title>
+
+ <para>In this mode, all index update operations applied on a given
+ node (JVM) will be applied to the Lucene directories (through the
+ directory providers) by the same node. This mode is typically used in
+ a non-clustered environment or in clustered environments where the
+ directory store is shared.</para>
+
+ <mediaobject>
+ <imageobject role="html">
+ <imagedata align="center" fileref="lucene-backend.png"
+ format="PNG" />
+ </imageobject>
+
+ <imageobject role="fo">
+ <imagedata align="center" depth="" fileref="lucene-backend.png"
+ format="PNG" scalefit="1" width="12cm" />
+ </imageobject>
+
+ <caption>
+ <para>Lucene back end configuration.</para>
+ </caption>
+ </mediaobject>
+
+ <para>This mode targets non clustered applications, or clustered
+ applications where the Directory is taking care of the locking
+ strategy.</para>
+
+ <para>The main advantage is simplicity and immediate visibility of the
+ changes in Lucene queries (a requirement in some applications).</para>
+ </section>
+
+ <section>
+ <title>JMS</title>
+
+ <para>All index update operations applied on a given node are sent to
+ a JMS queue. A unique reader will then process the queue and update
+ the master index. The master index is then replicated on a regular
+ basis to the slave copies. This is known as the master/slave pattern.
+ The master is solely responsible for updating the Lucene index. The
+ slaves can accept read as well as write operations. However, they only
+ process the read operation on their local index copy and delegate the
+ update operations to the master.</para>
+
+ <mediaobject>
+ <imageobject role="html">
+ <imagedata align="center" fileref="jms-backend.png" format="PNG" />
+ </imageobject>
+
+ <imageobject role="fo">
+ <imagedata align="center" depth="" fileref="jms-backend.png"
+ format="PNG" scalefit="1" width="12cm" />
+ </imageobject>
+
+ <caption>
+ <para>JMS back end configuration.</para>
+ </caption>
+ </mediaobject>
+
+ <para>This mode targets clustered environments where throughput is
+ critical, and index update delays are affordable. Reliability is
+ ensured by the JMS provider and by having the slaves working on a
+ local copy of the index.</para>
+ </section>
+
+ <section>
+ <title>JGroups</title>
+
+ <para>The JGroups based back end works similarly to the JMS one. It is designed on the same
+ master/slave pattern, but instead of JMS the JGroups toolkit is used as the replication mechanism.
+ This back end can be used as an alternative to the JMS one when response time is critical
+ but, for example, no JNDI service is available.</para>
+ </section>
+
+ <note>Hibernate Search is an extensible architecture. Feel free to drop
+ ideas for other third party back ends to
+ <literal>hibernate-dev@lists.jboss.org</literal>.</note>
+ </section>
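A short configuration sketch for selecting the back end; this assumes the hibernate.search.worker.* property names of Hibernate Search 3.x, and the JNDI names below are illustrative placeholders, not values from this commit:

# the Lucene back end is the default, so this line is optional
hibernate.search.worker.backend = lucene

# JMS back end on a slave node; connection factory and queue are looked up in JNDI
#hibernate.search.worker.backend = jms
#hibernate.search.worker.jms.connection_factory = /ConnectionFactory
#hibernate.search.worker.jms.queue = queue/hibernatesearch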
+
+ <section>
+ <title>Work execution</title>
+
+ <para>The indexing work (done by the back end) can be executed
+ synchronously with the transaction commit (or update operation if out of
+ transaction), or asynchronously.</para>
+
+ <section>
+ <title>Synchronous</title>
+
+ <para>This is the safe mode where the back end work is executed in
+ concert with the transaction commit. In a highly concurrent
+ environment, this can lead to throughput limitations (due to the
+ Apache Lucene lock mechanism) and it can increase the system response
+ time if the backend is significantly slower than the transactional
+ process and if a lot of IO operations are involved.</para>
+ </section>
+
+ <section>
+ <title>Asynchronous</title>
+
+ <para>This mode delegates the work done by the back end to a different
+ thread. That way, throughput and response time are (to a certain
+ extent) decorrelated from the back end performance. The drawback is
+ that a small delay appears between the transaction commit and the
+ index update and a small overhead is introduced to deal with thread
+ management.</para>
+
+ <para>It is recommended to use synchronous execution first and
+ evaluate asynchronous execution if performance problems occur and
+ after having set up a proper benchmark (ie not a lonely cowboy hitting
+ the system in a completely unrealistic way).</para>
+ </section>
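A minimal configuration sketch for choosing the execution mode; the hibernate.search.worker.* keys are those of Hibernate Search 3.x and the tuning values are illustrative assumptions, not recommendations:

# synchronous execution is the default
hibernate.search.worker.execution = async
# optional tuning for the asynchronous mode
hibernate.search.worker.thread_pool.size = 2
hibernate.search.worker.buffer_queue.max = 50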
+ </section>
+ </section>
+
+ <section id="search-architecture-readerstrategy" xreflabel="Reader strategy">
+ <title>Reader strategy</title>
+
+ <para>When executing a query, Hibernate Search interacts with the Apache
+ Lucene indexes through a reader strategy. Choosing a reader strategy will
+ depend on the profile of the application (frequent updates, read mostly,
+ asynchronous index update etc). See also <xref
+ linkend="configuration-reader-strategy" /></para>
+
+ <section>
+ <title>Shared</title>
+
+ <para>With this strategy, Hibernate Search will share the same
+ <classname>IndexReader</classname>, for a given Lucene index, across
+ multiple queries and threads provided that the
+ <classname>IndexReader</classname> is still up-to-date. If the
+ <classname>IndexReader</classname> is not up-to-date, a new one is
+ opened and provided. Each <classname>IndexReader</classname> is made of
+ several <classname>SegmentReader</classname>s. This strategy only
+ reopens segments that have been modified or created after last opening
+ and shares the already loaded segments from the previous instance. This
+ strategy is the default.</para>
+
+ <para>The name of this strategy is <literal>shared</literal>.</para>
+ </section>
+
+ <section>
+ <title>Not-shared</title>
+
+ <para>Every time a query is executed, a Lucene
+ <classname>IndexReader</classname> is opened. This strategy is not the
+ most efficient since opening and warming up an
+ <classname>IndexReader</classname> can be a relatively expensive
+ operation.</para>
+
+ <para>The name of this strategy is <literal>not-shared</literal>.</para>
+ </section>
+
+ <section>
+ <title>Custom</title>
+
+ <para>You can write your own reader strategy that suits your application
+ needs by implementing
+ <classname>org.hibernate.search.reader.ReaderProvider</classname>. The
+ implementation must be thread safe.</para>
+ </section>
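A configuration sketch for selecting the reader strategy; the custom class name below is a hypothetical placeholder:

# built-in strategies: shared (the default) or not-shared
hibernate.search.reader.strategy = not-shared
# or plug in your own ReaderProvider implementation
#hibernate.search.reader.strategy = my.corp.myapp.CustomReaderProvider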
+ </section>
+</chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/architecture.xml
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/batchindex.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/batchindex.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/batchindex.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,298 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="manual-index-changes">
+ <!-- $Id$ -->
+
+ <title>Manual index changes</title>
+
+ <para>As Hibernate core applies changes to the Database, Hibernate Search
+ detects these changes and will update the index automatically (unless the
+ EventListeners are disabled).
+ Sometimes changes are made to the database without using Hibernate, as when a
+ backup is restored or your data is otherwise affected;
+ for these cases Hibernate Search exposes the Manual Index APIs to explicitly
+ update or remove a single entity from the index, or rebuild the index for
+ the whole database, or remove all references to a specific type.</para>
+ <para>All these methods affect the Lucene Index only, no changes are applied
+ to the Database.</para>
+
+ <section>
+ <title>Adding instances to the Index</title>
+
+ <para>Using <classname>FullTextSession</classname>.<methodname>index(T entity)</methodname>
+ you can directly add or update a specific object instance to the index.
+ If this entity was already indexed, then the index will be updated.
+ Changes to the index are only applied at transaction commit.</para>
+
+ <example>
+ <title>Indexing an entity via
+ <methodname>FullTextSession.index(T entity)</methodname></title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+Object customer = fullTextSession.load( Customer.class, 8 );
+<emphasis role="bold">fullTextSession.index(customer);</emphasis>
+tx.commit(); //index only updated at commit time</programlisting>
+ </example>
+
+ <para>In case you want to add all instances for a type, or for all indexed types,
+ the recommended approach is to use a <classname>MassIndexer</classname>: see
+ <xref linkend="search-batchindex-massindexer" /> for more details.</para>
+
+ </section>
+
+ <section>
+ <title>Deleting instances from the Index: Purging</title>
+
+ <para>It is equally possible to remove an entity or all entities of a
+ given type from a Lucene index without the need to physically remove them
+ from the database. This operation is named purging and is also done
+ through the <classname>FullTextSession</classname>.</para>
+
+ <example>
+ <title>Purging a specific instance of an entity from the index</title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+for (Customer customer : customers) {
+ <emphasis role="bold">fullTextSession.purge( Customer.class, customer.getId() );</emphasis>
+}
+tx.commit(); //index is updated at commit time</programlisting>
+ </example>
+
+ <para>Purging will remove the entity with the given id from the Lucene
+ index but will not touch the database.</para>
+
+ <para>If you need to remove all entities of a given type, you can use the
+ <methodname>purgeAll</methodname> method. This operation removes all
+ entities of the type passed as a parameter as well as all its
+ subtypes.</para>
+
+ <example>
+ <title>Purging all instances of an entity from the index</title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+<emphasis role="bold">fullTextSession.purgeAll( Customer.class );</emphasis>
+//optionally optimize the index
+//fullTextSession.getSearchFactory().optimize( Customer.class );
+tx.commit(); //index changes are applied at commit time </programlisting>
+ </example>
+
+ <para>It is recommended to optimize the index after such an
+ operation.</para>
+
+ <note>
+ <para>Methods <methodname>index</methodname>,
+ <methodname>purge</methodname> and <methodname>purgeAll</methodname> are
+ available on <classname>FullTextEntityManager</classname> as
+ well.</para>
+ </note>
+ <note>
+ <para>All manual indexing methods (<methodname>index</methodname>,
+ <methodname>purge</methodname> and <methodname>purgeAll</methodname>) only
+ affect the index, not the database, nevertheless they are
+ transactional and as such they won't be applied until the transaction
+ is successfully committed, or you make use of
+ <methodname>flushToIndexes</methodname>.</para>
+ </note>
+ </section>
+
+ <section id="search-batchindex">
+ <title>Rebuilding the whole Index</title>
+
+ <para>If you change the entity mapping to the index, chances are that
+ the whole index needs to be updated; for example, if you decide to index
+ an existing field using a different analyzer you'll need to rebuild the
+ index for affected types. Also, if the database is replaced (like restored
+ from a backup, imported from a legacy system) you'll want to be able
+ to rebuild the index from existing data.
+ Hibernate Search provides two main strategies to choose from:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Using <classname>FullTextSession</classname>.<methodname>flushToIndexes()</methodname>
+ periodically, while using <classname>FullTextSession</classname>.<methodname>index()</methodname>
+ on all entities.</para>
+ </listitem>
+ <listitem>
+ <para>Use a <classname>MassIndexer</classname>.</para>
+ </listitem>
+ </itemizedlist>
+
+ <section id="search-batchindex-flushtoindexes">
+ <title>Using flushToIndexes()</title>
+
+ <para>
+ This strategy consists of removing the existing index and then adding
+ all entities back to the index using <classname>FullTextSession</classname>.<methodname>purgeAll()</methodname>
+ and <classname>FullTextSession</classname>.<methodname>index()</methodname>;
+ however, there are some memory and efficiency constraints.
+ For maximum efficiency Hibernate Search batches index operations
+ and executes them at commit time. If you expect to index a lot of data
+ you need to be careful about memory consumption since all
+ documents are kept in a queue until the transaction commit. You can
+ potentially face an <classname>OutOfMemoryException</classname> if you
+ don't empty the queue periodically: to do this you can use
+ <methodname>fullTextSession.flushToIndexes()</methodname>. Every time
+ <methodname>fullTextSession.flushToIndexes()</methodname> is called (or if
+ the transaction is committed), the batch queue is processed
+ applying all index changes. Be aware that, once flushed, the changes
+ cannot be rolled back.</para>
+
+ <example>
+ <title>Index rebuilding using index() and flushToIndexes()</title>
+
+ <programlisting>fullTextSession.setFlushMode(FlushMode.MANUAL);
+fullTextSession.setCacheMode(CacheMode.IGNORE);
+transaction = fullTextSession.beginTransaction();
+//Scrollable results will avoid loading too many objects in memory
+ScrollableResults results = fullTextSession.createCriteria( Email.class )
+ .setFetchSize(BATCH_SIZE)
+ .scroll( ScrollMode.FORWARD_ONLY );
+int index = 0;
+while( results.next() ) {
+ index++;
+ fullTextSession.index( results.get(0) ); //index each element
+ if (index % BATCH_SIZE == 0) {
+ fullTextSession.flushToIndexes(); //apply changes to indexes
+ fullTextSession.clear(); //free memory since the queue is processed
+ }
+}
+transaction.commit();</programlisting>
+ </example>
+
+ <note>
+ <para><literal>hibernate.search.worker.batch_size</literal> has been
+ deprecated in favor of this explicit API which provides better
+ control</para>
+ </note>
+ <para>Try to use a batch size that guarantees that your application will
+ not run out of memory: with a bigger batch size objects are fetched faster
+ from the database, but more memory is needed.</para>
+
+ </section>
+
+ <section id="search-batchindex-massindexer">
+ <title>Using a MassIndexer</title>
+ <para>Hibernate Search's <classname>MassIndexer</classname> uses several
+ parallel threads to rebuild the index; you can optionally select which entities
+ need to be reloaded or have it reindex all entities. This approach is optimized
+ for best performance but requires setting the application to maintenance mode:
+ making queries to the index is not recommended when a MassIndexer is busy.</para>
+
+ <example>
+ <title>Index rebuilding using a MassIndexer</title>
+ <programlisting>fullTextSession.createIndexer().startAndWait();</programlisting>
+ <para>This will rebuild the index, deleting it and then reloading all entities
+ from the database. Although it's simple to use, some tweaking is recommended
+ to speed up the process: there are several parameters configurable.</para>
+ </example>
+
+ <warning><para>While a MassIndexer is running the content of the index is undefined;
+ make sure that nobody tries to query the index during rebuilding!
+ If somebody does query the index it will not be corrupted, but most results will
+ likely be missing.</para></warning>
+
+ <example>
+ <title>Using a tuned MassIndexer</title>
+ <programlisting>fullTextSession
+ .createIndexer( User.class )
+ .batchSizeToLoadObjects( 25 )
+ .cacheMode( CacheMode.NORMAL )
+ .threadsToLoadObjects( 5 )
+ .threadsForSubsequentFetching( 20 )
+ .startAndWait();</programlisting>
+ <para>This will rebuild the index of all User instances (and subtypes), and will
+ create 5 parallel threads to load the User instances using batches of 25 objects
+ per query; these loaded User instances are then pipelined to 20 parallel threads
+ to load the attached lazy collections of User containing some information needed
+ for the index.</para>
+ <para>It is recommended to leave cacheMode at <literal>CacheMode.IGNORE</literal> (the default), as
+ in most reindexing situations the cache will be a useless additional overhead;
+ it might be useful to enable some other <literal>CacheMode</literal> depending on your data: it might increase
+ performance if the main entity relates to enum-like data included in the index.</para>
+ </example>
+
+ <tip><para>The "sweet spot" of number of threads to achieve best performance is
+ highly dependent on your overall architecture, database design and even data
+ values.
+ To find out the best number of threads for your application it is recommended
+ to use a profiler: all internal thread groups have meaningful names
+ to be easily identified with most tools.</para>
+ </tip>
+
+ <note>
+ <para>The MassIndexer was designed for speed and is unaware of transactions,
+ so there is no need to begin one or commit it. Also, because it is not transactional,
+ it is not recommended to let users use the system during its processing,
+ as it is unlikely people will be able to find results and the system load
+ might be too high anyway.</para>
+ </note>
+
+ </section>
+
+ <para>Other parameters which also affect indexing time and memory
+ consumption are:</para>
+
+ <itemizedlist>
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].exclusive_index_use</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.max_buffered_docs</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.max_field_length</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.max_merge_docs</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.merge_factor</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.ram_buffer_size</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.batch.term_index_interval</literal>
+ </listitem>
+ </itemizedlist>
+
+ <para>All <literal>.indexwriter</literal> parameters are Lucene specific and
+ Hibernate Search is just passing these parameters through - see <xref
+ linkend="lucene-indexing-performance" /> for more details.</para>
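For instance, a sketch of how such parameters could be set for the default index before a large rebuild; the property names are taken from the list above, while the values are illustrative assumptions rather than recommendations:

hibernate.search.default.exclusive_index_use = true
hibernate.search.default.indexwriter.batch.max_buffered_docs = 1000
hibernate.search.default.indexwriter.batch.ram_buffer_size = 64
hibernate.search.default.indexwriter.batch.merge_factor = 20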
+
+ </section>
+
+ </chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/batchindex.xml
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/configuration.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/configuration.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/configuration.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1163 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-configuration">
+ <!-- $Id$ -->
+
+ <title>Configuration</title>
+
+ <section id="search-configuration-directory" revision="1">
+ <title>Directory configuration</title>
+
+ <para>Apache Lucene has a notion of <literal>Directory</literal> to store
+ the index files. The <classname>Directory</classname> implementation can
+ be customized, but Lucene comes bundled with a file system
+ (<literal>FSDirectoryProvider</literal>) and an in memory
+ (<literal>RAMDirectoryProvider</literal>) implementation.
+ <literal>DirectoryProvider</literal>s are the Hibernate Search abstraction
+ around a Lucene <classname>Directory</classname> and handle the
+ configuration and the initialization of the underlying Lucene resources.
+ <xref linkend="directory-provider-table" /> shows the list of the
+ directory providers bundled with Hibernate Search.</para>
+
+ <table id="directory-provider-table">
+ <title>List of built-in Directory Providers</title>
+
+ <tgroup cols="3">
+ <thead>
+ <row>
+ <entry align="center">Class</entry>
+
+ <entry align="center">Description</entry>
+
+ <entry align="center">Properties</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry>org.hibernate.search.store.RAMDirectoryProvider</entry>
+
+ <entry>Memory based directory, the directory will be uniquely
+ identified (in the same deployment unit) by the
+ <literal>@Indexed.index</literal> element</entry>
+
+ <entry>none</entry>
+ </row>
+
+ <row>
+ <entry>org.hibernate.search.store.FSDirectoryProvider</entry>
+
+ <entry>File system based directory. The directory used will be
+ <indexBase>/< indexName ></entry>
+
+ <entry><para><literal>indexBase</literal> : Base
+ directory</para><para><literal>indexName</literal>: override
+ @Indexed.index (useful for sharded indexes)</para><para><literal>
+ locking_strategy</literal> : optional, see <xref
+ linkend="search-configuration-directory-lockfactories" />
+ </para></entry>
+ </row>
+
+ <row>
+ <entry>org.hibernate.search.store.FSMasterDirectoryProvider</entry>
+
+ <entry><para>File system based directory. Like
+ FSDirectoryProvider. It also copies the index to a source
+ directory (aka copy directory) on a regular basis.
+ </para><para>The recommended value for the refresh period is (at
+ least) 50% higher than the time to copy the information (default
+ 3600 seconds - 60 minutes).</para><para>Note that the copy is
+ based on an incremental copy mechanism reducing the average copy
+ time.</para><para>DirectoryProvider typically used on the master
+ node in a JMS back end cluster.</para><para>The <literal>
+ buffer_size_on_copy</literal> optimum depends on your operating
+ system and available RAM; most people reported good results using
+ values between 16 and 64MB.</para></entry>
+
+ <entry><para><literal>indexBase</literal>: Base
+ directory</para><para><literal>indexName</literal>: override
+ @Indexed.index (useful for sharded
+ indexes)</para><para><literal>sourceBase</literal>: Source (copy)
+ base directory.</para><para><literal>source</literal>: Source
+ directory suffix (default to <literal>@Indexed.index</literal>).
+ The actual source directory name being
+ <filename><sourceBase>/<source></filename>
+ </para><para><literal>refresh</literal>: refresh period in seconds
+ (the copy will take place every refresh seconds).</para><para>
+ <literal>buffer_size_on_copy</literal>: The amount of MegaBytes to
+ move in a single low level copy instruction; defaults to
+ 16MB.</para><para><literal> locking_strategy</literal> : optional,
+ see <xref
+ linkend="search-configuration-directory-lockfactories" />
+ </para></entry>
+ </row>
+
+ <row>
+ <entry>org.hibernate.search.store.FSSlaveDirectoryProvider</entry>
+
+ <entry><para>File system based directory. Like
+ FSDirectoryProvider, but retrieves a master version (source) on a
+ regular basis. To avoid locking and inconsistent search results, 2
+ local copies are kept.</para><para>The recommended value for the
+ refresh period is (at least) 50% higher than the time it takes to copy
+ the information (default 3600 seconds - 60 minutes).</para><para>Note
+ that the copy is based on an incremental copy mechanism reducing
+ the average copy time.</para><para>DirectoryProvider typically
+ used on slave nodes using a JMS back end.</para><para>The
+ <literal> buffer_size_on_copy</literal> optimum depends on your
+ operating system and available RAM; most people reported good
+ results using values between 16 and 64MB.</para></entry>
+
+ <entry><para><literal>indexBase</literal>: Base
+ directory</para><para><literal>indexName</literal>: override
+ @Indexed.index (useful for sharded
+ indexes)</para><para><literal>sourceBase</literal>: Source (copy)
+ base directory.</para><para><literal>source</literal>: Source
+ directory suffix (default to <literal>@Indexed.index</literal>).
+ The actual source directory name being
+ <filename><sourceBase>/<source></filename>
+ </para><para><literal>refresh</literal>: refresh period in seconds
+ (the copy will take place every <literal>refresh</literal> seconds).</para><para>
+ <literal>buffer_size_on_copy</literal>: The amount of megabytes to
+ move in a single low-level copy instruction; defaults to
+ 16MB.</para><para><literal>locking_strategy</literal>: optional,
+ see <xref
+ linkend="search-configuration-directory-lockfactories" />
+ </para></entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ <para>If the built-in directory providers do not fit your needs, you can
+ write your own directory provider by implementing the
+ <classname>org.hibernate.search.store.DirectoryProvider</classname>
+ interface.</para>
+
+ <para>Each indexed entity is associated with a Lucene index (an index can be
+ shared by several entities but this is not usually the case). You can
+ configure the index through properties prefixed by
+ <constant>hibernate.search.</constant><replaceable>indexname</replaceable>
+ . Default properties inherited by all indexes can be defined using the
+ prefix <constant>hibernate.search.default.</constant></para>
+
+ <para>To define the directory provider of a given index, you use the
+ <constant>hibernate.search.<replaceable>indexname</replaceable>.directory_provider
+ </constant> property.</para>
+
+ <example>
+ <title>Configuring directory providers</title>
+
+ <programlisting>hibernate.search.default.directory_provider org.hibernate.search.store.FSDirectoryProvider
+hibernate.search.default.indexBase=/usr/lucene/indexes
+hibernate.search.Rules.directory_provider org.hibernate.search.store.RAMDirectoryProvider</programlisting>
+ </example>
+
+ <para>applied to</para>
+
+ <example>
+ <title>Specifying the index name using the <literal>index</literal>
+ parameter of <classname>@Indexed</classname></title>
+
+ <programlisting>@Indexed(index="Status")
+public class Status { ... }
+
+@Indexed(index="Rules")
+public class Rule { ... }</programlisting>
+ </example>
+
+ <para>will create a file system directory in
+ <filename>/usr/lucene/indexes/Status</filename> where the Status entities
+ will be indexed, and use an in-memory directory named
+ <literal>Rules</literal> where Rule entities will be indexed.</para>
+
+ <para>You can easily define common rules like the directory provider and
+ base directory, and override those defaults later on a per-index
+ basis.</para>
+
+ <para>When writing your own <classname>DirectoryProvider</classname>, you can
+ utilize this configuration mechanism as well.</para>
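+
+  <para>For instance, the properties below wire in a hypothetical custom
+  provider and pass it an extra setting; the class name and the extra
+  property are made up purely for illustration:</para>
+
+  <example>
+    <title>Configuring a custom directory provider (illustrative)</title>
+
+    <programlisting>hibernate.search.default.directory_provider my.corp.search.CustomDirectoryProvider
+hibernate.search.default.some_custom_property some-value</programlisting>
+  </example>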
+ </section>
+
+ <section id="search-configuration-directory-sharding" revision="1">
+ <title>Sharding indexes</title>
+
+ <para>In some cases, it is necessary to split (shard) the indexing data of
+ a given entity type into several Lucene indexes. This solution is not
+ recommended unless there is a pressing need, because by default searches
+ will be slower as all shards have to be opened for a single search. In
+ other words, don't do it until you have problems :)</para>
+
+ <para>For example, sharding may be desirable if:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>A single index is so huge that index update times are slowing
+ the application down.</para>
+ </listitem>
+
+ <listitem>
+ <para>A typical search will only hit a sub-set of the index, such as
+ when data is naturally segmented by customer, region or
+ application.</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>Hibernate Search allows you to index a given entity type into
+ several sub indexes. Data is sharded into the different sub indexes thanks
+ to an <classname>IndexShardingStrategy</classname>. By default, no
+ sharding strategy is enabled, unless the number of shards is configured.
+ To configure the number of shards, use the following property:</para>
+
+ <example>
+ <title>Enabling index sharding by specifying nbr_of_shards for a
+ specific index</title>
+
+ <programlisting>hibernate.search.<indexName>.sharding_strategy.nbr_of_shards 5</programlisting>
+ </example>
+
+ <para>This will use 5 different shards.</para>
+
+ <para>The default sharding strategy, when shards are set up, splits the
+ data according to the hash value of the id string representation
+ (generated by the Field Bridge). This ensures a fairly balanced sharding.
+ You can replace the strategy by implementing
+ <literal>IndexShardingStrategy</literal> and by setting the following
+ property</para>
+
+ <example>
+ <title>Specifying a custom sharding strategy</title>
+
+ <programlisting>hibernate.search.<indexName>.sharding_strategy my.shardingstrategy.Implementation</programlisting>
+ </example>
+
+ <para>Using a custom <classname>IndexShardingStrategy</classname>
+ implementation, it's possible to define what shard a given entity is
+ indexed to. </para>
+
+ <para>It also allows for optimizing searches by selecting which shard to
+ run the query onto. By activating a filter (see <xref
+ linkend="query-filter-shard" />), a sharding strategy can select a subset
+ of the shards used to answer a query
+ (<classname>IndexShardingStrategy.getDirectoryProvidersForQuery</classname>)
+ and thus speed up the query execution.</para>
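+
+    <para>As an illustration, here is a minimal sketch of such a strategy. It
+    assumes the Hibernate Search 3.x <classname>IndexShardingStrategy</classname>
+    method signatures shown below and simply mimics the default hash based
+    routing; a real implementation would typically route on domain data (for
+    example a customer or region identifier) carried by the entity or the
+    Lucene document:</para>
+
+    <example>
+      <title>Sketch of a custom <classname>IndexShardingStrategy</classname></title>
+
+      <programlisting>public class MyShardingStrategy implements IndexShardingStrategy {
+
+    private DirectoryProvider<?>[] providers;
+
+    public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+        this.providers = providers;
+    }
+
+    public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+        return providers;
+    }
+
+    public DirectoryProvider<?> getDirectoryProviderForAddition(
+            Class<?> entity, Serializable id, String idInString, Document document) {
+        // route the document to a shard, here simply by hashing the id string
+        int shard = Math.abs( idInString.hashCode() % providers.length );
+        return providers[shard];
+    }
+
+    public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(
+            Class<?> entity, Serializable id, String idInString) {
+        // when the target shard cannot be derived, address all shards
+        return providers;
+    }
+}</programlisting>
+    </example>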
+
+ <para>Each shard has an independent directory provider configuration as
+ described in <xref linkend="search-configuration-directory" />. The
+ <classname>DirectoryProvider</classname> default names for the previous
+ example are <literal><indexName>.0</literal> to
+ <literal><indexName>.4</literal>. In other words, each shard has the
+ name of its owning index followed by <constant>.</constant> (dot) and its
+ index number.</para>
+
+ <example>
+ <title>Configuring the sharding configuration for an example entity
+ <classname>Animal</classname></title>
+
+ <programlisting>hibernate.search.default.indexBase /usr/lucene/indexes
+
+hibernate.search.Animal.sharding_strategy.nbr_of_shards 5
+hibernate.search.Animal.directory_provider org.hibernate.search.store.FSDirectoryProvider
+hibernate.search.Animal.0.indexName Animal00
+hibernate.search.Animal.3.indexBase /usr/lucene/sharded
+hibernate.search.Animal.3.indexName Animal03</programlisting>
+ </example>
+
+ <para>This configuration uses the default id string hashing strategy and
+ shards the Animal index into 5 subindexes. All subindexes are
+ <classname>FSDirectoryProvider</classname> instances and the directory
+ where each subindex is stored is as follows:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>for subindex 0: /usr/lucene/indexes/Animal00 (shared indexBase
+ but overridden indexName)</para>
+ </listitem>
+
+ <listitem>
+ <para>for subindex 1: /usr/lucene/indexes/Animal.1 (shared indexBase,
+ default indexName)</para>
+ </listitem>
+
+ <listitem>
+ <para>for subindex 2: /usr/lucene/indexes/Animal.2 (shared indexBase,
+ default indexName)</para>
+ </listitem>
+
+ <listitem>
+ <para>for subindex 3: /usr/lucene/shared/Animal03 (overridden
+ indexBase, overridden indexName)</para>
+ </listitem>
+
+ <listitem>
+ <para>for subindex 4: /usr/lucene/indexes/Animal.4 (shared indexBase,
+ default indexName)</para>
+ </listitem>
+ </itemizedlist>
+ </section>
+
+ <section>
+ <title>Sharing indexes (two entities into the same directory)</title>
+
+ <note>
+ <para>This is only presented here so that you know the option is
+ available. There is really not much benefit in sharing indexes.</para>
+ </note>
+
+ <para>It is technically possible to store the information of more than one
+ entity into a single Lucene index. There are two ways to accomplish
+ this:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Configuring the underlying directory providers to point to the
+ same physical index directory. In practice, you set the property
+ <literal>hibernate.search.[fully qualified entity
+ name].indexName</literal> to the same value. As an example let’s use
+ the same index (directory) for the <classname>Furniture</classname>
+ and <classname>Animal</classname> entity. We just set
+ <literal>indexName</literal> for both entities to, for example,
+ “Animal”. Both entities will then be stored in the Animal
+ directory.</para>
+
+ <para><programlisting><code>hibernate.search.org.hibernate.search.test.shards.Furniture.indexName = Animal
+hibernate.search.org.hibernate.search.test.shards.Animal.indexName = Animal</code></programlisting></para>
+ </listitem>
+
+ <listitem>
+ <para>Setting the <code>@Indexed</code> annotation’s
+ <methodname>index</methodname> attribute of the entities you want to
+ merge to the same value. If we again wanted all
+ <classname>Furniture</classname> instances to be indexed in the
+ <classname>Animal</classname> index along with all instances of
+ <classname>Animal</classname> we would specify
+ <code>@Indexed(index="Animal")</code> on both
+ <classname>Animal</classname> and <classname>Furniture</classname>
+ classes.</para>
+ </listitem>
+ </itemizedlist>
+ </section>
+
+ <section>
+ <title>Worker configuration</title>
+
+ <para>It is possible to refine how Hibernate Search interacts with Lucene
+ through the worker configuration. The work can be applied to the Lucene
+ directory directly or sent to a JMS queue for later processing. When applied
+ to the Lucene directory, the work can be processed synchronously or
+ asynchronously with respect to the transaction commit.</para>
+
+ <para>You can define the worker configuration using the following
+ properties</para>
+
+ <table>
+ <title>Worker configuration</title>
+
+ <tgroup cols="2">
+ <tbody>
+ <row>
+ <entry>Property</entry>
+
+ <entry>Description</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.backend</literal></entry>
+
+ <entry>Out of the box support for the Apache Lucene back end and
+ the JMS back end. Defaults to <literal>lucene</literal>. Also supports
+ <literal>jms</literal>, <literal>blackhole</literal>,
+ <literal>jgroupsMaster</literal> and <literal>jgroupsSlave</literal>.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.execution</literal></entry>
+
+ <entry>Supports synchronous and asynchronous execution. Defaults to
+ <literal>sync</literal>. Also supports
+ <literal>async</literal>.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.thread_pool.size</literal></entry>
+
+ <entry>Defines the number of threads in the pool. Useful only for
+ asynchronous execution. Defaults to 1.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.buffer_queue.max</literal></entry>
+
+ <entry>Defines the maximal size of the work queue if the thread pool
+ is starved. Useful only for asynchronous execution. Defaults to
+ infinite. If the limit is reached, the work is done by the main
+ thread.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jndi.*</literal></entry>
+
+ <entry>Defines the JNDI properties to initiate the InitialContext
+ (if needed). JNDI is only used by the JMS back end.</entry>
+ </row>
+
+ <row>
+ <entry><literal>
+ hibernate.search.worker.jms.connection_factory</literal></entry>
+
+ <entry>Mandatory for the JMS back end. Defines the JNDI name to
+ lookup the JMS connection factory from
+ (<literal>/ConnectionFactory</literal> by default in JBoss
+ AS)</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jms.queue</literal></entry>
+
+ <entry>Mandatory for the JMS back end. Defines the JNDI name to
+ lookup the JMS queue from. The queue will be used to post work
+ messages.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jgroups.clusterName</literal></entry>
+
+ <entry>Optional for the JGroups back end. Defines the name of the JGroups channel.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jgroups.configurationFile</literal></entry>
+
+ <entry>Optional JGroups network stack configuration. Defines the name of a JGroups
+ configuration file, which must exist on the classpath.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jgroups.configurationXml</literal></entry>
+
+ <entry>Optional JGroups network stack configuration.
+ Defines a String representing JGroups configuration as XML.</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.worker.jgroups.configurationString</literal></entry>
+
+ <entry>Optional JGroups network stack configuration.
+ Provides JGroups configuration in plain text.</entry>
+ </row>
+
+ </tbody>
+ </tgroup>
+ </table>
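+
+    <para>As an illustration, the following sketch keeps the default Lucene
+    back end but switches to asynchronous execution; the pool and queue sizes
+    are arbitrary values chosen for the example:</para>
+
+    <example>
+      <title>Example worker configuration for asynchronous execution</title>
+
+      <programlisting>hibernate.search.worker.backend = lucene
+hibernate.search.worker.execution = async
+hibernate.search.worker.thread_pool.size = 2
+hibernate.search.worker.buffer_queue.max = 50</programlisting>
+    </example>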
+ </section>
+
+ <section id="jms-backend">
+ <title>JMS Master/Slave configuration</title>
+
+ <para>This section describes in greater detail how to configure the
+ Master/Slave Hibernate Search architecture.</para>
+
+ <mediaobject>
+ <imageobject role="html">
+ <imagedata align="center" fileref="jms-backend.png" format="PNG" />
+ </imageobject>
+
+ <imageobject role="fo">
+ <imagedata align="center" depth="" fileref="jms-backend.png"
+ format="PNG" scalefit="1" width="12cm" />
+ </imageobject>
+
+ <caption><para>JMS back end configuration.</para></caption>
+ </mediaobject>
+
+ <section>
+ <title>Slave nodes</title>
+
+ <para>Every index update operation is sent to a JMS queue. Index
+ querying operations are executed on a local index copy.</para>
+
+ <example>
+ <title>JMS Slave configuration</title>
+
+ <programlisting>### slave configuration
+
+## DirectoryProvider
+# (remote) master location
+hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/mastercopy
+
+# local copy location
+hibernate.search.default.indexBase = /Users/prod/lucenedirs
+
+# refresh every half hour
+hibernate.search.default.refresh = 1800
+
+# appropriate directory provider
+hibernate.search.default.directory_provider = org.hibernate.search.store.FSSlaveDirectoryProvider
+
+## Backend configuration
+hibernate.search.worker.backend = jms
+hibernate.search.worker.jms.connection_factory = /ConnectionFactory
+hibernate.search.worker.jms.queue = queue/hibernatesearch
+#optional jndi configuration (check your JMS provider for more information)
+
+## Optional asynchronous execution strategy
+# hibernate.search.worker.execution = async
+# hibernate.search.worker.thread_pool.size = 2
+# hibernate.search.worker.buffer_queue.max = 50</programlisting>
+ </example>
+
+ <para>A local file system copy is recommended for faster search
+ results.</para>
+
+ <para>The refresh period should be higher than the expected copy
+ time.</para>
+ </section>
+
+ <section>
+ <title>Master node</title>
+
+ <para>Every index update operation is taken from a JMS queue and
+ executed. The master index is copied on a regular basis.</para>
+
+ <example>
+ <title>JMS Master configuration</title>
+
+ <programlisting>### master configuration
+
+## DirectoryProvider
+# (remote) master location where information is copied to
+hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/mastercopy
+
+# local master location
+hibernate.search.default.indexBase = /Users/prod/lucenedirs
+
+# refresh every half hour
+hibernate.search.default.refresh = 1800
+
+# appropriate directory provider
+hibernate.search.default.directory_provider = org.hibernate.search.store.FSMasterDirectoryProvider
+
+## Backend configuration
+#Backend is the default lucene one</programlisting>
+ </example>
+
+ <para>The refresh period should be higher than the expected copy
+ time.</para>
+
+ <para>In addition to the Hibernate Search framework configuration, a
+ Message Driven Bean should be written and set up to process the index
+ work queue through JMS.</para>
+
+ <example>
+ <title>Message Driven Bean processing the indexing queue</title>
+
+ <programlisting>@MessageDriven(activationConfig = {
+ @ActivationConfigProperty(propertyName="destinationType", propertyValue="javax.jms.Queue"),
+ @ActivationConfigProperty(propertyName="destination", propertyValue="queue/hibernatesearch"),
+ @ActivationConfigProperty(propertyName="DLQMaxResent", propertyValue="1")
+ } )
+public class MDBSearchController extends AbstractJMSHibernateSearchController implements MessageListener {
+ @PersistenceContext EntityManager em;
+
+ //method retrieving the appropriate session
+ protected Session getSession() {
+ return (Session) em.getDelegate();
+ }
+
+ //potentially close the session opened in #getSession(), not needed here
+ protected void cleanSessionIfNeeded(Session session) {
+ }
+}</programlisting>
+ </example>
+
+ <para>This example inherits from the abstract JMS controller class
+ available in the Hibernate Search source code and implements a JavaEE 5
+ MDB. This implementation is given as an example and, while a real
+ implementation will most likely be more complex, can be adjusted to make
+ use of non Java EE Message Driven Beans. For more information about the
+ <methodname>getSession()</methodname> and
+ <methodname>cleanSessionIfNeeded()</methodname>, please check
+ <classname>AbstractJMSHibernateSearchController</classname>'s
+ javadoc.</para>
+ </section>
+ </section>
+
+ <section id="jgroups-backend">
+ <title>JGroups Master/Slave configuration</title>
+ <para>This section describes how to configure the JGroups Master/Slave back end.
+ The configuration examples illustrated in the JMS Master/Slave configuration
+ section (<xref linkend="jms-backend" />) also apply here; only
+ a different back end needs to be set.
+ </para>
+ <section>
+ <title>Slave nodes</title>
+ <para>Every index update operation is sent through a JGroups channel to the master node. Index
+ querying operations are executed on a local index copy.
+ </para>
+ <example><title>JGroups Slave configuration</title>
+ <programlisting>
+### slave configuration
+## Backend configuration
+hibernate.search.worker.backend = jgroupsSlave
+ </programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>Master node</title>
+ <para>Every index update operation is taken from a JGroups channel and
+ executed. The master index is copied on a regular basis.
+ </para>
+ <example><title>JGroups Master configuration</title>
+ <programlisting>
+### master configuration
+## Backend configuration
+hibernate.search.worker.backend = jgroupsMaster
+ </programlisting>
+ </example>
+ </section>
+ <section>
+ <title>JGroups channel configuration</title>
+ <para>Optionally, the configuration for the JGroups transport protocols
+ (UDP, TCP) and the channel name can be defined. It can be applied to both master and slave nodes.
+ There are several ways to configure the JGroups transport details.
+ If it is not defined explicitly, the configuration found in the <literal>
+ flush-udp.xml</literal> file is used.</para>
+ <example><title>JGroups transport protocols configuration</title>
+ <programlisting>
+## configuration
+#udp.xml file needs to be located in the classpath
+hibernate.search.worker.backend.jgroups.configurationFile = udp.xml
+
+#protocol stack configuration provided in XML format
+hibernate.search.worker.backend.jgroups.configurationXml =
+
+<config xmlns="urn:org:jgroups"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="urn:org:jgroups file:schema/JGroups-2.8.xsd">
+<UDP
+mcast_addr="${jgroups.udp.mcast_addr:228.10.10.10}"
+mcast_port="${jgroups.udp.mcast_port:45588}"
+tos="8"
+thread_naming_pattern="pl"
+thread_pool.enabled="true"
+thread_pool.min_threads="2"
+thread_pool.max_threads="8"
+thread_pool.keep_alive_time="5000"
+thread_pool.queue_enabled="false"
+thread_pool.queue_max_size="100"
+thread_pool.rejection_policy="Run"/>
+<PING timeout="1000" num_initial_members="3"/>
+<MERGE2 max_interval="30000" min_interval="10000"/>
+<FD_SOCK/>
+<FD timeout="3000" max_tries="3"/>
+<VERIFY_SUSPECT timeout="1500"/>
+<pbcast.STREAMING_STATE_TRANSFER/>
+<pbcast.FLUSH timeout="0"/>
+</config>
+
+#protocol stack configuration provided in "old style" jgroups format
+hibernate.search.worker.backend.jgroups.configurationString =
+
+UDP(mcast_addr=228.1.2.3;mcast_port=45566;ip_ttl=32):PING(timeout=3000;
+num_initial_members=6):FD(timeout=5000):VERIFY_SUSPECT(timeout=1500):
+pbcast.NAKACK(gc_lag=10;retransmit_timeout=3000):UNICAST(timeout=5000):
+FRAG:pbcast.GMS(join_timeout=3000;shun=false;print_local_addr=true)
+
+ </programlisting>
+ </example>
+ <para>Master and slave nodes communicate over a JGroups channel
+ that is identified by the same name. The name of the channel can be defined
+ explicitly; if it is not, the default <literal>HSearchCluster</literal> is used.</para>
+ <example><title>JGroups channel name configuration</title>
+ <programlisting>
+## Backend configuration
+hibernate.search.worker.backend.jgroups.clusterName = Hibernate-Search-Cluster
+ </programlisting>
+ </example>
+ </section>
+ </section>
+
+ <section id="configuration-reader-strategy">
+ <title>Reader strategy configuration</title>
+
+ <para>The different reader strategies are described in <xref
+ linkend="search-architecture-readerstrategy" />. Out of the box strategies
+ are:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para><literal>shared</literal>: share index readers across several
+ queries. This strategy is the most efficient.</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>not-shared</literal>: create an index reader for each
+ individual query</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>The default reader strategy is <literal>shared</literal>. This can
+ be adjusted:</para>
+
+ <programlisting>hibernate.search.reader.strategy = not-shared</programlisting>
+
+ <para>Adding this property switches to the <literal>not-shared</literal>
+ strategy.</para>
+
+ <para>Or if you have a custom reader strategy:</para>
+
+ <programlisting>hibernate.search.reader.strategy = my.corp.myapp.CustomReaderProvider</programlisting>
+
+ <para>where <classname>my.corp.myapp.CustomReaderProvider</classname> is
+ the custom strategy implementation.</para>
+ </section>
+
+ <section id="search-configuration-event" revision="2">
+ <title>Enabling Hibernate Search and automatic indexing</title>
+
+ <section>
+ <title>Enabling Hibernate Search</title>
+
+ <para>Hibernate Search is enabled out of the box when using Hibernate
+ Annotations or Hibernate EntityManager. If, for some reason, you need to
+ disable it, set
+ <literal>hibernate.search.autoregister_listeners</literal> to false.
+ Note that there is no performance penalty when the listeners are enabled
+ but no entities are annotated as indexed.</para>
+
+ <para>To enable Hibernate Search in Hibernate Core (i.e. if you don't use
+ Hibernate Annotations), add the
+ <literal>FullTextIndexEventListener</literal> for the following six
+ Hibernate events and also add it after the default
+ <literal>DefaultFlushEventListener</literal>, as in the following
+ example.</para>
+
+ <example>
+ <title>Explicitly enabling Hibernate Search by configuring the
+ <classname>FullTextIndexEventListener</classname></title>
+
+ <programlisting><hibernate-configuration>
+ <session-factory>
+ ...
+ <event type="post-update">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="post-insert">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="post-delete">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="post-collection-recreate">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="post-collection-remove">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="post-collection-update">
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ <event type="flush">
+ <listener class="org.hibernate.event.def.DefaultFlushEventListener"/>
+ <listener class="org.hibernate.search.event.FullTextIndexEventListener"/>
+ </event>
+ </session-factory>
+</hibernate-configuration></programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>Automatic indexing</title>
+
+ <para>By default, every time an object is inserted, updated or deleted
+ through Hibernate, Hibernate Search updates the corresponding Lucene index.
+ It is sometimes desirable to disable this feature, for example if your index
+ is read-only or if index updates are done in a batch way (see <xref
+ linkend="search-batchindex" />).</para>
+
+ <para>To disable event based indexing, set</para>
+
+ <programlisting>hibernate.search.indexing_strategy manual</programlisting>
+
+ <note>
+ <para>In most cases, the JMS back end provides the best of both worlds: a
+ lightweight event based system keeps track of all changes in the
+ system, and the heavyweight indexing process is done by a separate
+ process or machine.</para>
+ </note>
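+
+      <para>When event based indexing is disabled, the application becomes
+      responsible for keeping the index up to date. As a minimal sketch (where
+      <literal>customer</literal> stands for any managed, indexed entity
+      already loaded in the session), an object can be indexed on demand
+      through the <classname>FullTextSession</classname>:</para>
+
+      <example>
+        <title>Manually indexing an entity</title>
+
+        <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+
+// index the entity explicitly since automatic indexing is disabled
+fullTextSession.index(customer);
+
+tx.commit(); //index changes are applied at commit time</programlisting>
+      </example>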
+ </section>
+ </section>
+
+ <section id="lucene-indexing-performance" revision="3">
+ <title>Tuning Lucene indexing performance</title>
+
+ <para>Hibernate Search allows you to tune the Lucene indexing performance
+ by specifying a set of parameters which are passed through to the underlying
+ Lucene <literal>IndexWriter</literal>, such as
+ <literal>mergeFactor</literal>, <literal>maxMergeDocs</literal> and
+ <literal>maxBufferedDocs</literal>. You can specify these parameters
+ either as default values applying for all indexes, on a per index basis,
+ or even per shard.</para>
+
+ <para>There are two sets of parameters allowing for different performance
+ settings depending on the use case. During indexing operations triggered
+ by database modifications, the parameters are grouped by the
+ <literal>transaction</literal> keyword: <programlisting>hibernate.search.[default|<indexname>].indexwriter.transaction.<parameter_name></programlisting>
+ When indexing occurs via <literal>FullTextSession.index()</literal> or
+ via a <classname>MassIndexer</classname> (see
+ <xref linkend="search-batchindex" />), the used properties are those
+ grouped under the <literal>batch</literal> keyword: <programlisting>hibernate.search.[default|<indexname>].indexwriter.batch.<parameter_name></programlisting></para>
+
+ <para>If no value is set for a
+ <literal>.batch</literal> value in a specific shard configuration,
+ Hibernate Search will look at the index section, then at the default
+ section: <programlisting>hibernate.search.Animals.2.indexwriter.transaction.max_merge_docs 10
+hibernate.search.Animals.2.indexwriter.transaction.merge_factor 20
+hibernate.search.default.indexwriter.batch.max_merge_docs 100</programlisting>
+ This configuration will result in these settings applied to the second
+ shard of Animals index:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para><literal>transaction.max_merge_docs</literal> = 10</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>batch.max_merge_docs</literal> = 100</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>transaction.merge_factor</literal> = 20</para>
+ </listitem>
+
+ <listitem>
+ <para><literal>batch.merge_factor</literal> = Lucene default</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>All other values will use the defaults defined in Lucene.</para>
+
+ <para>The default for all values is to leave them at Lucene's own default,
+ so the listed values in the following table actually depend on the version
+ of Lucene you are using; values shown are relative to version
+ <literal>2.4</literal>. For more information about Lucene indexing
+ performance, please refer to the Lucene documentation.</para>
+
+ <warning><para>Previous versions had the <literal>batch</literal>
+ parameters inherit from <literal>transaction</literal> properties.
+ These now need to be set explicitly.</para></warning>
+
+ <table>
+ <title>List of indexing performance and behavior properties</title>
+
+ <tgroup cols="3">
+ <thead>
+ <row>
+ <entry align="center">Property</entry>
+
+ <entry align="center">Description</entry>
+
+ <entry align="center">Default Value</entry>
+ </row>
+ </thead>
+
+ <tbody>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].exclusive_index_use</literal></entry>
+
+ <entry><para>Set to <literal>true</literal> when no other
+ process will need to write to the same index: this will enable
+ Hibernate Search to work in exclusive mode on the index and
+ improve performance in writing changes to the index.</para></entry>
+
+ <entry><literal>false</literal> (releases locks as soon as possible)</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_buffered_delete_terms</literal></entry>
+
+ <entry><para>Determines the minimal number of delete terms
+ required before the buffered in-memory delete terms are applied
+ and flushed. If there are documents buffered in memory at the
+ time, they are merged and a new segment is created.</para></entry>
+
+ <entry>Disabled (flushes by RAM usage)</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_buffered_docs</literal></entry>
+
+ <entry><para>Controls the amount of documents buffered in memory
+ during indexing. The bigger the value, the more RAM is
+ consumed.</para></entry>
+
+ <entry>Disabled (flushes by RAM usage)</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_field_length</literal></entry>
+
+ <entry><para>The maximum number of terms that will be indexed for
+ a single field. This limits the amount of memory required for
+ indexing so that very large data will not crash the indexing
+ process by running out of memory. This setting refers to the
+ number of running terms, not to the number of different
+ terms.</para> <para>This silently truncates large documents,
+ excluding from the index all terms that occur further in the
+ document. If you know your source documents are large, be sure to
+ set this value high enough to accommodate the expected size. If
+ you set it to Integer.MAX_VALUE, then the only limit is your
+ memory, but you should anticipate an OutOfMemoryError. </para>
+ <para>If setting this value in <literal>batch</literal>
+ differently than in <literal>transaction</literal> you may get
+ different data (and results) in your index depending on the
+ indexing mode.</para></entry>
+
+ <entry>10000</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_merge_docs</literal></entry>
+
+ <entry><para>Defines the largest number of documents allowed in a
+ segment. Larger values are best for batched indexing and speedier
+ searches. Small values are best for transaction
+ indexing.</para></entry>
+
+ <entry>Unlimited (Integer.MAX_VALUE)</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].merge_factor</literal></entry>
+
+ <entry><para>Controls segment merge frequency and size. </para>
+ <para>Determines how often segment indexes are merged when
+ insertion occurs. With smaller values, less RAM is used while
+ indexing, and searches on unoptimized indexes are faster, but
+ indexing speed is slower. With larger values, more RAM is used
+ during indexing, and while searches on unoptimized indexes are
+ slower, indexing is faster. Thus larger values (> 10) are best
+ for batch index creation, and smaller values (< 10) for indexes
+ that are interactively maintained. The value must not be lower than
+ 2.</para></entry>
+
+ <entry>10</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].ram_buffer_size</literal></entry>
+
+ <entry><para>Controls the amount of RAM in MB dedicated to
+ document buffers. When used together with max_buffered_docs, a flush
+ occurs for whichever event happens first.</para> <para>Generally
+ for faster indexing performance it's best to flush by RAM usage
+ instead of document count and use as large a RAM buffer as you
+ can.</para></entry>
+
+ <entry>16 MB</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].term_index_interval</literal></entry>
+
+ <entry><para>Expert: Set the interval between indexed
+ terms.</para> <para>Large values cause less memory to be used by
+ IndexReader, but slow random-access to terms. Small values cause
+ more memory to be used by an IndexReader, and speed random-access
+ to terms. See Lucene documentation for more
+ details.</para></entry>
+
+ <entry>128</entry>
+ </row>
+
+ <row>
+ <entry><literal>hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].use_compound_file</literal></entry>
+
+ <entry>The advantage of using the compound file format is that
+ fewer file descriptors are used. The disadvantage is that indexing
+ takes more time and temporary disk space. You can set this
+ parameter to <literal>false</literal> in an attempt to improve the
+ indexing time, but you could run out of file descriptors if
+ <literal>mergeFactor</literal> is also
+ large.<para></para><para>Boolean parameter, use
+ "<literal>true</literal>" or "<literal>false</literal>". The
+ default value for this option is
+ <literal>true</literal>.</para></entry>
+
+ <entry>true</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ <tip><para>When your architecture permits it, always set
+ <literal>hibernate.search.default.exclusive_index_use=true</literal>
+ as it greatly improves efficiency in index writing.</para>
+ </tip>
+
+ <para>To tune the indexing speed it might be useful to time the object
+ loading from the database in isolation from the writes to the index. To
+ achieve this, set <literal>blackhole</literal> as the worker back end and
+ start your indexing routines. This back end does not disable Hibernate
+ Search: it will still generate the needed changesets for the index, but
+ will discard them instead of flushing them to the index. In contrast to
+ setting <literal>hibernate.search.indexing_strategy</literal> to
+ <literal>manual</literal>, using <literal>blackhole</literal> will
+ possibly load more data to rebuild the index from associated
+ entities.</para>
+
+ <programlisting>hibernate.search.worker.backend blackhole</programlisting>
+
+ <para>The recommended approach is to focus first on optimizing the object
+ loading, and then use the timings you achieve as a baseline to tune the
+ indexing process.</para>
+
+ <para>The <literal>blackhole</literal> backend is not meant to be used in
+ production, only as a tool to identify indexing bottlenecks.</para>
+ </section>
+
+ <section id="search-configuration-directory-lockfactories" revision="1">
+
+
+ <title>LockFactory configuration</title>
+
+
+
+ <para>Lucene Directories have default locking strategies which work well
+ for most cases, but it's possible to specify for each index managed by
+ Hibernate Search which LockFactory you want to use.</para>
+
+
+
+ <para>Some of these locking strategies require a filesystem level lock and
+ may be used even on RAM based indexes, but this is not recommended and of
+ no practical use.</para>
+
+
+
+ <para>To select a locking factory, set the
+ <literal>hibernate.search.<index>.locking_strategy</literal> option
+ to one of <literal>simple</literal>, <literal>native</literal>,
+ <literal>single</literal> or <literal>none</literal>, or set it to the
+ fully qualified name of an implementation of
+ <literal>org.hibernate.search.store.LockFactoryFactory</literal>.
+ Implementing this interface, you can provide a custom
+ <literal>org.apache.lucene.store.LockFactory</literal>. <table
+ id="search-configuration-directory-lockfactories-table">
+ <title>List of available LockFactory implementations</title>
+
+ <tgroup cols="3">
+ <thead>
+ <row>
+ <entry align="center">name</entry>
+
+ <entry align="center">Class</entry>
+
+ <entry align="center">Description</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry>simple</entry>
+
+ <entry>org.apache.lucene.store.SimpleFSLockFactory</entry>
+
+ <entry>
+ <para>Safe implementation based on Java's File API; it marks
+ the usage of the index by creating a marker file.</para>
+
+ <para>If for some reason you had to kill your application, you
+ will need to remove this file before restarting it.</para>
+
+ <para>This is the default implementation for
+ <literal>FSDirectoryProvider</literal>,<literal>FSMasterDirectoryProvider</literal>
+ and <literal>FSSlaveDirectoryProvider</literal>.</para>
+ </entry>
+ </row>
+
+ <row>
+ <entry>native</entry>
+
+ <entry>org.apache.lucene.store.NativeFSLockFactory</entry>
+
+ <entry>
+ <para>Like <literal>simple</literal>, this also marks the
+ usage of the index by creating a marker file, but this one uses
+ native OS file locks, so that even if your application
+ crashes the locks will be cleaned up.</para>
+
+ <para>This implementation has known problems on NFS.</para>
+ </entry>
+ </row>
+
+ <row>
+ <entry>single</entry>
+
+ <entry>org.apache.lucene.store.SingleInstanceLockFactory</entry>
+
+ <entry>
+ <para>This LockFactory doesn't use a file marker but is a Java
+ object lock held in memory; therefore it's possible to use it
+ only when you are sure the index is not going to be shared by
+ any other process.</para>
+
+ <para>This is the default implementation for
+ <literal>RAMDirectoryProvider</literal>.</para>
+ </entry>
+ </row>
+
+ <row>
+ <entry>none</entry>
+
+ <entry>org.apache.lucene.store.NoLockFactory</entry>
+
+ <entry>
+ <para>Changes to this index are not coordinated by any
+ lock; test your application carefully and make sure you know
+ what it means.</para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table></para>
+
+ <para>Configuration example:</para>
+
+ <programlisting>hibernate.search.default.locking_strategy simple
+hibernate.search.Animals.locking_strategy native
+hibernate.search.Books.locking_strategy org.custom.components.MyLockingFactory</programlisting>
+
+
+
+ <para />
+
+
+ </section>
+</chapter>
\ No newline at end of file
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/configuration.xml
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/getting-started.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/getting-started.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/getting-started.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,644 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id$ -->
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd" [
+<!ENTITY version "WORKING">
+]>
+<chapter id="getting-started">
+
+ <title>Getting started</title>
+
+ <para>Welcome to Hibernate Search! The following chapter will guide you
+ through the initial steps required to integrate Hibernate Search into an
+ existing Hibernate enabled application. In case you are new to
+ Hibernate, we recommend you start <ulink
+ url="http://hibernate.org/152.html">here</ulink>.</para>
+
+ <section>
+ <title>System Requirements</title>
+
+ <table>
+ <title>System requirements</title>
+
+ <tgroup cols="2">
+ <tbody>
+ <row>
+ <entry>Java Runtime</entry>
+
+ <entry>A JDK or JRE version <emphasis>5</emphasis> or greater. You
+ can download a Java Runtime for Windows/Linux/Solaris <ulink
+ url="http://java.sun.com/javase/downloads/">here</ulink>.</entry>
+ </row>
+
+ <row>
+ <entry>Hibernate Search</entry>
+
+ <entry><literal>hibernate-search.jar</literal> and all runtime
+ dependencies from the <literal>lib</literal> directory of the
+ Hibernate Search distribution. Please refer to
+ <filename>README.txt </filename>in the lib directory to understand
+ which dependencies are required.</entry>
+ </row>
+
+ <row>
+ <entry>Hibernate Core</entry>
+
+ <entry>These instructions have been tested against Hibernate 3.3.x.
+ You will need <literal>hibernate-core.jar</literal> and its
+ transitive dependencies from the <literal>lib</literal> directory
+ of the distribution. Refer to <literal>README.txt</literal> in the
+ <literal>lib</literal> directory of the distribution to determine
+ the minimum runtime requirements.</entry>
+ </row>
+
+ <row>
+ <entry>Hibernate Annotations</entry>
+
+ <entry>Even though Hibernate Search can be used without Hibernate
+ Annotations, the following instructions will use them for basic
+ entity configuration (<emphasis>@Entity, @Id,
+ @OneToMany,...</emphasis>). This part of the configuration could
+ also be expressed in XML or code. However, Hibernate Search itself
+ has its own set of annotations (<emphasis>@Indexed, @DocumentId,
+ @Field,...</emphasis>) for which there exists so far no
+ alternative configuration. The tutorial is tested against version
+ 3.4.x of Hibernate Annotations.</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ <para>You can download all dependencies from the Hibernate <ulink
+ url="http://www.hibernate.org/6.html">download site</ulink>. You can also
+ verify the dependency versions against the <ulink
+ url="http://www.hibernate.org/6.html#A3">Hibernate Compatibility
+ Matrix</ulink>.</para>
+ </section>
+
+ <section>
+ <title>Using Maven</title>
+
+ <para>Instead of managing all dependencies manually, Maven users can
+ use the <ulink
+ url="http://repository.jboss.com/maven2">JBoss Maven repository</ulink>.
+ Just add the JBoss repository URL to the <emphasis>repositories</emphasis>
+ section of your <filename>pom.xml</filename> or
+ <filename>settings.xml</filename>:</para>
+
+ <example>
+ <title>Adding the JBoss maven repository to
+ <filename>settings.xml</filename></title>
+
+ <programlisting>
+<repository>
+ <id>repository.jboss.org</id>
+ <name>JBoss Maven Repository</name>
+ <url>http://repository.jboss.org/maven2</url>
+ <layout>default</layout>
+</repository>
+ </programlisting>
+ </example>
+
+ <para>Then add the following dependencies to your pom.xml:</para>
+
+ <example>
+ <title>Maven dependencies for Hibernate Search</title>
+
+ <programlisting>
+<dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-search</artifactId>
+ <version>&version;</version>
+</dependency>
+<dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-annotations</artifactId>
+ <version>3.4.0.GA</version>
+</dependency>
+<dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-entitymanager</artifactId>
+ <version>3.4.0.GA</version>
+</dependency>
+<dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-common</artifactId>
+ <version>1.3.0</version>
+</dependency>
+<dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-core</artifactId>
+ <version>1.3.0</version>
+</dependency>
+<dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-snowball</artifactId>
+ <version>2.4.0</version>
+</dependency>
+ </programlisting>
+ </example>
+
+ <para>Not all dependencies are required. Only the
+ <emphasis>hibernate-search</emphasis> dependency is mandatory. This
+ dependency, together with its required transitive dependencies, contains
+ all required classes needed to use Hibernate Search.
+ <emphasis>hibernate-annotations</emphasis> is only needed if you want to
+ use annotations to configure your domain model as we do in this tutorial.
+ However, even if you choose not to use Hibernate Annotations you still
+ have to use the Hibernate Search specific annotations, which are bundled
+ with the hibernate-search jar file, to configure your Lucene index.
+ Currently there is no XML configuration available for Hibernate Search.
+ <emphasis>hibernate-entitymanager</emphasis> is required if you want to
+ use Hibernate Search in conjunction with JPA. The Solr dependencies are
+ needed if you want to utilize Solr's analyzer framework. More about this
+ later. And finally, the <literal>lucene-snowball</literal> dependency is
+ needed if you want to use Lucene's snowball stemmer.</para>
+ </section>
+
+ <section>
+ <title>Configuration</title>
+
+ <para>Once you have downloaded and added all required dependencies to your
+ application, you have to add a couple of properties to your Hibernate
+ configuration file. If you are using Hibernate directly this can be done
+ in <literal>hibernate.properties</literal> or
+ <literal>hibernate.cfg.xml</literal>. If you are using Hibernate via JPA
+ you can also add the properties to <literal>persistence.xml</literal>. The
+ good news is that for standard use most properties offer a sensible
+ default. An example <filename>persistence.xml</filename> configuration
+ could look like this:</para>
+
+ <example>
+ <title>Basic configuration options to be added to
+ <literal><filename>hibernate.properties</filename></literal>,
+ <literal><filename>hibernate.cfg.xml</filename></literal> or
+ <filename>persistence.xml</filename></title>
+
+ <programlisting>
+...
+<property name="hibernate.search.default.directory_provider"
+ value="org.hibernate.search.store.FSDirectoryProvider"/>
+
+<property name="hibernate.search.default.indexBase" value="/var/lucene/indexes"/>
+...
+ </programlisting>
+ </example>
+
+ <para>First you have to tell Hibernate Search which
+ <classname>DirectoryProvider</classname> to use. This can be achieved by
+ setting the <literal>hibernate.search.default.directory_provider</literal>
+ property. Apache Lucene has the notion of a <literal>Directory</literal>
+ to store the index files. Hibernate Search handles the initialization and
+ configuration of a Lucene <literal>Directory</literal> instance via a
+ <literal>DirectoryProvider</literal>. In this tutorial we will use a
+ subclass of <literal>DirectoryProvider</literal> called
+ <classname>FSDirectoryProvider</classname>. This will give us the ability
+ to physically inspect the Lucene indexes created by Hibernate Search (e.g.
+ via <ulink url="http://www.getopt.org/luke/">Luke</ulink>). Once you have
+ a working configuration you can start experimenting with other directory
+ providers (see <xref linkend="search-configuration-directory" />). Next to
+ the directory provider you also have to specify the default root directory
+ for all indexes via
+ <literal>hibernate.search.default.indexBase</literal>.</para>
+
+ <para>Let's assume that your application contains the Hibernate managed
+ classes <classname>example.Book</classname> and
+ <classname>example.Author</classname> and you want to add free text search
+ capabilities to your application in order to search the books contained in
+ your database.</para>
+
+ <example>
+ <title>Example entities Book and Author before adding Hibernate Search
+ specific annotations</title>
+
+ <programlisting>
+package example;
+...
+@Entity
+public class Book {
+
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ private String title;
+
+ private String subtitle;
+
+ @ManyToMany
+ private Set<Author> authors = new HashSet<Author>();
+
+ private Date publicationDate;
+
+ public Book() {
+ }
+
+ // standard getters/setters follow here
+ ...
+}
+ </programlisting>
+
+ <programlisting>
+package example;
+...
+@Entity
+public class Author {
+
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ private String name;
+
+ public Author() {
+ }
+
+ // standard getters/setters follow here
+ ...
+}
+
+</programlisting>
+ </example>
+
+ <para>To achieve this you have to add a few annotations to the
+ <classname>Book</classname> and <classname>Author</classname> class. The
+ first annotation <literal>@Indexed</literal> marks
+ <classname>Book</classname> as indexable. By design Hibernate Search needs
+ to store an untokenized id in the index to ensure index uniqueness for a
+ given entity. <literal>@DocumentId</literal> marks the property to use for
+ this purpose and is in most cases the same as the database primary key. In
+ fact since the 3.1.0 release of Hibernate Search
+ <literal>@DocumentId</literal> is optional in the case where an
+ <classname>@Id</classname> annotation exists.</para>
+
+ <para>Next you have to mark the fields you want to make searchable. Let's
+ start with <literal>title</literal> and <literal>subtitle</literal> and
+ annotate both with <literal>@Field</literal>. The parameter
+ <literal>index=Index.TOKENIZED</literal> will ensure that the text will be
+ tokenized using the default Lucene analyzer. Usually, tokenizing means
+ chunking a sentence into individual words and potentially excluding common
+ words like <literal>'a'</literal> or '<literal>the</literal>'. We will
+ talk more about analyzers a little later on. The second parameter we
+ specify within <literal>@Field</literal>,<literal>
+ store=Store.NO</literal>, ensures that the actual data will not be stored
+ in the index. Whether this data is stored in the index or not has nothing
+ to do with the ability to search for it. From Lucene's perspective it is
+ not necessary to keep the data once the index is created. The benefit of
+ storing it is the ability to retrieve it via projections (<xref
+ linkend="projections" />). </para>
+
+ <para>Without projections, Hibernate Search will by default execute a
+ Lucene query in order to find the database identifiers of the entities
+ matching the query criteria and use these identifiers to retrieve managed
+ objects from the database. The decision for or against projection has to
+ be made on a case by case basis. The default behaviour -
+ <literal>Store.NO</literal> - is recommended since it returns managed
+ objects whereas projections only return object arrays.</para>
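+
+  <para>As a side note, should you later decide to store a field and use
+  projections, the query can be refined as in the following sketch. It assumes
+  the <literal>title</literal> field was stored with <literal>Store.YES</literal>
+  (unlike in this tutorial) and that <literal>fullTextSession</literal> and
+  <literal>luceneQuery</literal> are obtained as shown in the searching section
+  below:</para>
+
+  <example>
+    <title>Sketch of a projection query</title>
+
+    <programlisting>org.hibernate.search.FullTextQuery fullTextQuery =
+    fullTextSession.createFullTextQuery( luceneQuery, Book.class );
+// return the stored title field instead of managed Book instances
+fullTextQuery.setProjection( "title" );
+// each element of the result list is an Object[] holding the projected fields
+List result = fullTextQuery.list();</programlisting>
+  </example>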
+
+ <para>After this short look under the hood let's go back to annotating the
+ <classname>Book</classname> class. Another annotation we have not yet
+ discussed is <literal>@DateBridge</literal>. This annotation is one of the
+ built-in field bridges in Hibernate Search. The Lucene index is purely
+ string based. For this reason Hibernate Search must convert the data types
+ of the indexed fields to strings and vice versa. A range of predefined
+ bridges are provided, including the <classname>DateBridge</classname>
+ which will convert a <classname>java.util.Date</classname> into a
+ <classname>String</classname> with the specified resolution. For more
+ details see <xref linkend="search-mapping-bridge" />.</para>
+
+ <para>This leaves us with <literal>@IndexedEmbedded</literal>. This
+ annotation is used to index associated entities
+ (<literal>@ManyToMany</literal>, <literal>@*ToOne</literal> and
+ <literal>@Embedded</literal>) as part of the owning entity. This is needed
+ since a Lucene index document is a flat data structure which does not know
+ anything about object relations. To ensure that the authors' names will be
+ searchable you have to make sure that the names are indexed as part of the
+ book itself. On top of <literal>@IndexedEmbedded</literal> you will also
+ have to mark all fields of the associated entity you want to have included
+ in the index with <literal>@Field</literal>. For more details see <xref
+ linkend="search-mapping-associated" />.</para>
+
+ <para>These settings should be sufficient for now. For more details on
+ entity mapping refer to <xref linkend="search-mapping-entity" />.</para>
+
+ <example>
+ <title>Example entities after adding Hibernate Search
+ annotations</title>
+
+ <programlisting>
+package example;
+...
+@Entity
+<emphasis role="bold">@Indexed</emphasis>
+public class Book {
+
+ @Id
+ @GeneratedValue
+ <emphasis role="bold">@DocumentId</emphasis>
+ private Integer id;
+
+ <emphasis role="bold">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>
+ private String title;
+
+ <emphasis role="bold">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>
+ private String subtitle;
+
+ <emphasis role="bold">@IndexedEmbedded</emphasis>
+ @ManyToMany
+ private Set<Author> authors = new HashSet<Author>();
+
+<emphasis role="bold"> @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ @DateBridge(resolution = Resolution.DAY)</emphasis>
+ private Date publicationDate;
+
+ public Book() {
+ }
+
+ // standard getters/setters follow here
+ ...
+}
+ </programlisting>
+
+ <programlisting>
+package example;
+...
+@Entity
+public class Author {
+
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ <emphasis role="bold">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>
+ private String name;
+
+ public Author() {
+ }
+
+ // standard getters/setters follow here
+ ...
+}
+ </programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>Indexing</title>
+
+ <para>Hibernate Search will transparently index every entity persisted,
+ updated or removed through Hibernate Core. However, you have to trigger an
+ initial indexing to populate the Lucene index with the data already present
+ in your database. Once you have added the above properties and annotations
+ it is time to trigger an initial batch index of your books. You can
+ achieve this by using one of the following code snippets (see also <xref
+ linkend="search-batchindex" />):</para>
+
+ <example>
+ <title>Using Hibernate Session to index data</title>
+
+ <programlisting>
+FullTextSession fullTextSession = Search.getFullTextSession(session);
+fullTextSession.createIndexer().startAndWait();</programlisting>
+ </example>
+
+ <example>
+ <title>Using JPA to index data</title>
+
+ <programlisting>
+EntityManager em = entityManagerFactory.createEntityManager();
+FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(em);
+fullTextEntityManager.createIndexer().startAndWait();</programlisting>
+ </example>
+
+ <para>After executing the above code, you should be able to see a Lucene
+ index under <literal>/var/lucene/indexes/example.Book</literal>. Go ahead
+ and inspect this index with <ulink
+ url="http://www.getopt.org/luke/">Luke</ulink>. It will help you to
+ understand how Hibernate Search works.</para>
+ </section>
+
+ <section>
+ <title>Searching</title>
+
+ <para>Now it is time to execute a first search. The general approach is to
+ create a native Lucene query and then wrap this query into an
+ org.hibernate.Query in order to get all the functionality one is used to
+ from the Hibernate API. The following code will prepare a query against
+ the indexed fields, execute it and return a list of
+ <classname>Book</classname>s. </para>
+
+ <example>
+ <title>Using Hibernate Session to create and execute a search</title>
+
+ <programlisting>
+FullTextSession fullTextSession = Search.getFullTextSession(session);
+Transaction tx = fullTextSession.beginTransaction();
+
+// create native Lucene query
+String[] fields = new String[]{"title", "subtitle", "authors.name", "publicationDate"};
+MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());
+org.apache.lucene.search.Query query = parser.parse( "Java rocks!" );
+
+// wrap Lucene query in a org.hibernate.Query
+org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(query, Book.class);
+
+// execute search
+List result = hibQuery.list();
+
+tx.commit();
+session.close();
+ </programlisting>
+ </example>
+
+ <example>
+ <title>Using JPA to create and execute a search</title>
+
+ <programlisting>
+EntityManager em = entityManagerFactory.createEntityManager();
+FullTextEntityManager fullTextEntityManager =
+ org.hibernate.search.jpa.Search.getFullTextEntityManager(em);
+em.getTransaction().begin();
+
+// create native Lucene query
+String[] fields = new String[]{"title", "subtitle", "authors.name", "publicationDate"};
+MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());
+org.apache.lucene.search.Query query = parser.parse( "Java rocks!" );
+
+// wrap Lucene query in a javax.persistence.Query
+javax.persistence.Query persistenceQuery = fullTextEntityManager.createFullTextQuery(query, Book.class);
+
+// execute search
+List result = persistenceQuery.getResultList();
+
+em.getTransaction().commit();
+em.close();
+
+ </programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>Analyzer</title>
+
+ <para>Let's make things a little more interesting now. Assume that one of
+ your indexed book entities has the title "Refactoring: Improving the
+ Design of Existing Code" and you want to get hits for all of the following
+ queries: "refactor", "refactors", "refactored" and "refactoring". In
+ Lucene this can be achieved by choosing an analyzer class which applies
+ word stemming during the indexing <emphasis role="bold">as well
+ as</emphasis> search process. Hibernate Search offers several ways to
+ configure the analyzer to use (see <xref linkend="analyzer" />):</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Setting the <literal>hibernate.search.analyzer</literal>
+ property in the configuration file. The specified class will then be
+ the default analyzer.</para>
+ </listitem>
+
+ <listitem>
+ <para>Setting the <literal>@Analyzer</literal>
+ annotation at the entity level.</para>
+ </listitem>
+
+ <listitem>
+ <para>Setting the <literal>@Analyzer</literal>
+ annotation at the field level.</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>When using the <literal>@Analyzer</literal> annotation one can
+ either specify the fully qualified classname of the analyzer to use or one
+ can refer to an analyzer definition defined by the
+ <literal>@AnalyzerDef</literal> annotation. In the latter case the Solr
+ analyzer framework with its factories approach is utilized. To find out
+ more about the factory classes available you can either browse the Solr
+ JavaDoc or read the corresponding section on the <ulink
+ url="http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters">Solr
+ Wiki.</ulink> Note that depending on the chosen factory class additional
+ libraries on top of the Solr dependencies might be required. For example,
+ the <classname>PhoneticFilterFactory</classname> depends on <ulink
+ url="http://commons.apache.org/codec">commons-codec</ulink>.</para>
+
+ <para>In the example below a
+ <classname>StandardTokenizerFactory</classname> is used followed by two
+ filter factories, <classname>LowerCaseFilterFactory</classname> and
+ <classname>SnowballPorterFilterFactory</classname>. The standard tokenizer
+ splits words at punctuation characters and hyphens while keeping email
+ addresses and internet hostnames intact. It is a good general purpose
+ tokenizer. The lowercase filter lowercases the letters in each token
+ whereas the snowball filter finally applies language specific
+ stemming.</para>
+
+ <para>Generally, when using the Solr framework you have to start with a
+ tokenizer followed by an arbitrary number of filters.</para>
+
+ <example>
+ <title>Using <classname>@AnalyzerDef</classname> and the Solr framework
+ to define and use an analyzer</title>
+
+ <programlisting>
+
+package example;
+...
+@Entity
+@Indexed
+<emphasis role="bold">@AnalyzerDef(name = "customanalyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {
+ @Parameter(name = "language", value = "English")
+ })
+ })</emphasis>
+public class Book {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index=Index.TOKENIZED, store=Store.NO)
+ <emphasis role="bold">@Analyzer(definition = "customanalyzer")</emphasis>
+ private String title;
+
+ @Field(index=Index.TOKENIZED, store=Store.NO)
+ <emphasis role="bold">@Analyzer(definition = "customanalyzer")</emphasis>
+ private String subtitle;
+
+ @IndexedEmbedded
+ @ManyToMany
+ private Set<Author> authors = new HashSet<Author>();
+
+<emphasis role="bold"> </emphasis> @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ @DateBridge(resolution = Resolution.DAY)
+ private Date publicationDate;
+
+ public Book() {
+ }
+
+ // standard getters/setters follow here
+ ...
+}
+
+ </programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>What's next</title>
+
+ <para>The above paragraphs hopefully helped you get an overview of
+ Hibernate Search. Using the Maven archetype plugin and the following
+ command you can create an initial runnable Maven project structure
+ populated with the example code of this tutorial.</para>
+
+ <example>
+ <title>Using the Maven archetype to create tutorial sources</title>
+
+ <programlisting>mvn archetype:create \
+ -DarchetypeGroupId=org.hibernate \
+ -DarchetypeArtifactId=hibernate-search-quickstart \
+ -DarchetypeVersion=3.2.0.Beta1 \
+ -DgroupId=my.company -DartifactId=quickstart</programlisting>
+ </example>
+
+ <para>Using the Maven project you can execute the examples, inspect the
+ file system based index, and search and retrieve a list of managed objects.
+ Just run <emphasis>mvn package</emphasis> to compile the sources and run
+ the unit tests.</para>
+
+ <para>The next step after this tutorial is to get more familiar with the
+ overall architecture of Hibernate Search (<xref
+ linkend="search-architecture" />) and explore the basic features in more
+ detail. Two topics which were only briefly touched upon in this tutorial were
+ analyzer configuration (<xref linkend="analyzer" />) and field bridges
+ (<xref linkend="search-mapping-bridge" />), both important features
+ required for more fine-grained indexing. More advanced topics cover
+ clustering (<xref linkend="jms-backend" />) and large indexes handling
+ (<xref linkend="search-configuration-directory-sharding" />).</para>
+ </section>
+</chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/getting-started.xml
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/lucene-native.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/lucene-native.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/lucene-native.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,220 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-lucene-native">
+ <!-- $Id$ -->
+
+ <title>Advanced features</title>
+
+ <section>
+ <title>SearchFactory</title>
+
+ <para>The <classname>SearchFactory</classname> object keeps track of the
+ underlying Lucene resources for Hibernate Search. It is also a convenient
+ way to access Lucene natively. The <classname>SearchFactory</classname>
+ can be accessed from a <classname>FullTextSession</classname>:</para>
+
+ <example>
+ <title>Accessing the <classname>SearchFactory</classname></title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(regularSession);
+SearchFactory searchFactory = fullTextSession.getSearchFactory();</programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>Accessing a Lucene Directory</title>
+
+ <para>You can always access the Lucene directories through plain Lucene,
+ the Directory structure is in no way different with or without Hibernate
+ Search. However, there are some more convenient ways to access a given
+ Directory. The <classname>SearchFactory</classname> keeps track of the
+ <classname>DirectoryProvider</classname>s per indexed class. One directory
+ provider can be shared amongst several indexed classes if the classes
+ share the same underlying index directory. While usually not the case, a
+ given entity can have several <classname>DirectoryProvider</classname>s if
+ the index is sharded (see <xref
+ linkend="search-configuration-directory-sharding" />).</para>
+
+ <example>
+ <title>Accessing the Lucene <classname>Directory</classname></title>
+
+ <programlisting>DirectoryProvider[] provider = searchFactory.getDirectoryProviders(Order.class);
+org.apache.lucene.store.Directory directory = provider[0].getDirectory();</programlisting>
+ </example>
+
+ <para>In this example, directory points to the Lucene index storing
+ <classname>Order</classname> information. Note that the obtained Lucene
+ directory must not be closed (this is the responsibility of Hibernate
+ Search).</para>
+ </section>
+
+ <section>
+ <title>Using an IndexReader</title>
+
+ <para>Queries in Lucene are executed on an <literal>IndexReader</literal>.
+ Hibernate Search caches all index readers to maximize performance. Your
+ code can access these cached resources, but you have to follow some "good
+ citizen" rules.</para>
+
+ <example>
+ <title>Accessing an <classname>IndexReader</classname></title>
+
+ <programlisting>DirectoryProvider orderProvider = searchFactory.getDirectoryProviders(Order.class)[0];
+DirectoryProvider clientProvider = searchFactory.getDirectoryProviders(Client.class)[0];
+
+ReaderProvider readerProvider = searchFactory.getReaderProvider();
+IndexReader reader = readerProvider.openReader(orderProvider, clientProvider);
+
+try {
+ //do read-only operations on the reader
+}
+finally {
+ readerProvider.closeReader(reader);
+}</programlisting>
+ </example>
+
+ <para>The ReaderProvider (described in <xref
+ linkend="search-architecture-readerstrategy" />) will open an IndexReader
+ on top of the index(es) referenced by the directory providers. Because
+ this <classname>IndexReader</classname> is shared amongst several clients,
+ you must adhere to the following rules:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Never call indexReader.close(), but always call
+ readerProvider.closeReader(reader), preferably in a finally
+ block.</para>
+ </listitem>
+
+ <listitem>
+ <para>Don't use this <classname>IndexReader</classname> for
+ modification operations (you would get an exception). If you want to
+ use a read/write index reader, open one from the Lucene Directory
+ object.</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>Aside from those rules, you can use the IndexReader freely,
+ especially to do native queries. Using the shared
+ <literal>IndexReader</literal>s will make most queries more
+ efficient.</para>
+ </section>
+
+ <section>
+ <title>Customizing Lucene's scoring formula</title>
+
+ <para>Lucene allows the user to customize its scoring formula by extending
+ <classname>org.apache.lucene.search.Similarity</classname>. The abstract
+ methods defined in this class match the factors of the following formula
+ calculating the score of query q for document d:</para>
+
+ <para><emphasis role="bold">score(q,d) = coord(q,d) · queryNorm(q) ·
+ ∑<subscript>t in q</subscript> ( tf(t in d) ·
+ idf(t)<superscript>2</superscript> · t.getBoost() · norm(t,d)
+ )</emphasis></para>
+
+ <para><informaltable align="left" width="">
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry align="center">Factor</entry>
+
+ <entry align="center">Description</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry align="left">tf(t in d)</entry>
+
+ <entry>Term frequency factor for the term (t) in the document
+ (d).</entry>
+ </row>
+
+ <row>
+ <entry align="left">idf(t)</entry>
+
+ <entry>Inverse document frequency of the term.</entry>
+ </row>
+
+ <row>
+ <entry align="left">coord(q,d)</entry>
+
+ <entry>Score factor based on how many of the query terms are
+ found in the specified document.</entry>
+ </row>
+
+ <row>
+ <entry align="left">queryNorm(q)</entry>
+
+ <entry>Normalizing factor used to make scores between queries
+ comparable.</entry>
+ </row>
+
+ <row>
+ <entry align="left">t.getBoost()</entry>
+
+ <entry>Field boost.</entry>
+ </row>
+
+ <row>
+ <entry align="left">norm(t,d)</entry>
+
+ <entry>Encapsulates a few (indexing time) boost and length
+ factors.</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>It is beyond the scope of this manual to explain this
+ formula in more detail. Please refer to
+ <classname>Similarity</classname>'s Javadocs for more information.</para>
+
+ <para>Hibernate Search provides two ways to modify Lucene's similarity
+ calculation. First you can set the default similarity by specifying the
+ fully qualified classname of your <classname>Similarity</classname>
+ implementation using the property
+ <constant>hibernate.search.similarity</constant>. The default value is
+ <classname>org.apache.lucene.search.DefaultSimilarity</classname>.
+ Additionally you can override the default similarity on class level using
+ the <literal>@Similarity</literal> annotation.<programlisting>@Entity
+@Indexed
+<emphasis role="bold">@Similarity(impl = DummySimilarity.class)</emphasis>
+public class Book {
+ ...
+}</programlisting>As an example, let's assume it is not important how often a
+ term appears in a document. Documents with a single occurrence of the term
+ should be scored the same as documents with multiple occurrences. In this
+ case your custom implementation of the method <methodname>tf(float
+ freq)</methodname> should return 1.0.</para>
+
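+  <para>A minimal sketch of such an implementation (the class name
+  <classname>DummySimilarity</classname> simply matches the annotation example
+  above and is purely illustrative):</para>
+
+  <programlisting>public class DummySimilarity extends org.apache.lucene.search.DefaultSimilarity {
+    // ignore how often a term occurs in a document:
+    // a single occurrence scores the same as many occurrences
+    public float tf(float freq) {
+        return 1.0f;
+    }
+}</programlisting>
+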
+ <warning><para>When two entities share the same index they must declare the
+ same <classname>Similarity</classname> implementation. Classes in the same
+ class hierarchy always share the index, so it's not allowed to override the
+ <classname>Similarity</classname> implementation in a subtype.</para></warning>
+
+ </section>
+</chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/lucene-native.xml
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/mapping.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/mapping.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/mapping.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,2276 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-mapping" revision="3">
+ <!-- $Id$ -->
+
+ <title>Mapping entities to the index structure</title>
+
+ <para>All the metadata information needed to index entities is described
+ through annotations. There is no need for XML mapping files. In fact there
+ is currently no XML configuration option available (see <ulink
+ url="http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-210">HSEARCH-210</ulink>).
+ You can still use Hibernate mapping files for the basic Hibernate
+ configuration, but the Hibernate Search specific configuration has to be
+ expressed via annotations.</para>
+
+ <section id="search-mapping-entity" revision="3">
+ <title>Mapping an entity</title>
+
+ <section id="basic-mapping">
+ <title>Basic mapping</title>
+
+ <para>First, we must declare a persistent class as indexable. This is
+ done by annotating the class with <literal>@Indexed</literal> (all
+ entities not annotated with <literal>@Indexed</literal> will be ignored
+ by the indexing process):</para>
+
+ <example>
+ <title>Making a class indexable using the
+ <classname>@Indexed</classname> annotation</title>
+
+ <programlisting>@Entity
+<emphasis role="bold">@Indexed(index="indexes/essays")</emphasis>
+public class Essay {
+ ...
+}</programlisting>
+ </example>
+
+ <para>The <literal>index</literal> attribute tells Hibernate what the
+ Lucene directory name is (usually a directory on your file system). It
+ is recommended to define a base directory for all Lucene indexes using
+ the <literal>hibernate.search.default.indexBase</literal> property in
+ your configuration file. Alternatively you can specify a base directory
+ per indexed entity by specifying
+ <literal>hibernate.search.<index>.indexBase</literal>, where
+ <literal><index></literal> is the fully qualified classname of the
+ indexed entity. Each entity instance will be represented by a Lucene
+ <classname>Document</classname> inside the given index (aka
+ Directory).</para>
+
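+      <para>As a configuration sketch (the directory paths and the
+      <classname>example.Book</classname> entity name are illustrative
+      assumptions, not values required by Hibernate Search), these properties
+      could look as follows:</para>
+
+      <programlisting># base directory used for all indexes
+hibernate.search.default.indexBase = /var/lucene/indexes
+
+# optionally override the base directory for a single indexed entity
+hibernate.search.example.Book.indexBase = /var/lucene/bookindexes</programlisting>
+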
+ <para>For each property (or attribute) of your entity, you have the
+ ability to describe how it will be indexed. The default (no annotation
+ present) means that the property is ignored by the indexing process.
+ <literal>@Field</literal> does declare a property as indexed. When
+ indexing an element to a Lucene document you can specify how it is
+ indexed:</para>
+
+ <itemizedlist>
+ <listitem>
+          <para><literal>name</literal>: describes under which name the
+          property should be stored in the Lucene Document. The default value
+          is the property name (following the JavaBeans convention).</para>
+ </listitem>
+
+ <listitem>
+          <para><literal>store</literal>: describes whether or not the
+ property is stored in the Lucene index. You can store the value
+ <literal>Store.YES</literal> (consuming more space in the index but
+ allowing projection, see <xref linkend="projections" /> for more
+ information), store it in a compressed way
+ <literal>Store.COMPRESS</literal> (this does consume more CPU), or
+ avoid any storage <literal>Store.NO</literal> (this is the default
+ value). When a property is stored, you can retrieve its original
+ value from the Lucene Document. This is not related to whether the
+ element is indexed or not.</para>
+ </listitem>
+
+ <listitem>
+          <para>index: describes how the element is indexed and the type of
+          information stored. The different values are
+          <literal>Index.NO</literal> (no indexing, i.e. it cannot be found by a
+ query), <literal>Index.TOKENIZED</literal> (use an analyzer to
+ process the property), <literal>Index.UN_TOKENIZED</literal> (no
+ analyzer pre-processing), <literal>Index.NO_NORMS</literal> (do not
+ store the normalization data). The default value is
+ <literal>TOKENIZED</literal>.</para>
+ </listitem>
+
+ <listitem>
+          <para>termVector: describes collections of term-frequency pairs.
+          This attribute enables term vectors to be stored during indexing so
+          they are available within documents (a short sketch showing how to
+          enable them follows this list). The default value is
+          TermVector.NO.</para>
+
+ <para>The different values of this attribute are:</para>
+
+ <informaltable align="left" width="">
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry align="center">Value</entry>
+
+ <entry align="center">Definition</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry align="left">TermVector.YES</entry>
+
+ <entry>Store the term vectors of each document. This
+ produces two synchronized arrays, one contains document
+ terms and the other contains the term's frequency.</entry>
+ </row>
+
+ <row>
+ <entry align="left">TermVector.NO</entry>
+
+ <entry>Do not store term vectors.</entry>
+ </row>
+
+ <row>
+ <entry align="left">TermVector.WITH_OFFSETS</entry>
+
+ <entry>Store the term vector and token offset information.
+ This is the same as TermVector.YES plus it contains the
+ starting and ending offset position information for the
+ terms.</entry>
+ </row>
+
+ <row>
+ <entry align="left">TermVector.WITH_POSITIONS</entry>
+
+ <entry>Store the term vector and token position information.
+ This is the same as TermVector.YES plus it contains the
+ ordinal positions of each occurrence of a term in a
+ document.</entry>
+ </row>
+
+ <row>
+ <entry align="left">TermVector.WITH_POSITION_OFFSETS</entry>
+
+ <entry>Store the term vector, token position and offset
+ information. This is a combination of the YES, WITH_OFFSETS
+ and WITH_POSITIONS.</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ </listitem>
+ </itemizedlist>
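+
+      <para>As a sketch of the last attribute (the <classname>Article</classname>
+      entity and its <literal>content</literal> property are illustrative
+      assumptions), term vectors could be enabled like this:</para>
+
+      <programlisting>@Entity
+@Indexed
+public class Article {
+    ...
+    @Field( index = Index.TOKENIZED, store = Store.YES,
+            termVector = TermVector.WITH_POSITION_OFFSETS )
+    private String content;
+    ...
+}</programlisting>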
+
+ <para>Whether or not you want to store the original data in the index
+ depends on how you wish to use the index query result. For a regular
+ Hibernate Search usage storing is not necessary. However you might want
+ to store some fields to subsequently project them (see <xref
+ linkend="projections" /> for more information).</para>
+
+ <para>Whether or not you want to tokenize a property depends on whether
+      you wish to search the element as is, or by the words it contains. It
+      makes sense to tokenize a text field, but probably not a date field.
+ <note>
+ <para>Fields used for sorting must not be tokenized.</para>
+ </note></para>
+
+ <para>Finally, the id property of an entity is a special property used
+      by Hibernate Search to ensure index uniqueness of a given entity. By
+      design, an id has to be stored and must not be tokenized. To mark a
+      property as the index id, use the <literal>@DocumentId</literal> annotation.
+ If you are using Hibernate Annotations and you have specified @Id you
+ can omit @DocumentId. The chosen entity id will also be used as document
+ id.</para>
+
+ <example id="example-annotated-entity">
+        <title>Adding <classname>@DocumentId</classname> and
+ <classname>@Field</classname> annotations to an indexed entity</title>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+public class Essay {
+ ...
+
+ @Id
+ <emphasis role="bold">@DocumentId</emphasis>
+ public Long getId() { return id; }
+
+ <emphasis role="bold">@Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ <emphasis role="bold">@Field(index=Index.TOKENIZED)</emphasis>
+ public String getText() { return text; }
+}</programlisting>
+ </example>
+
+      <para><xref linkend="example-annotated-entity" /> defines an index with
+      three fields: <literal>id</literal>, <literal>Abstract</literal> and
+      <literal>text</literal>. Note that by default the field name is
+      decapitalized, following the JavaBean specification.</para>
+ </section>
+
+ <section>
+ <title>Mapping properties multiple times</title>
+
+ <para>Sometimes one has to map a property multiple times per index, with
+ slightly different indexing strategies. For example, sorting a query by
+ field requires the field to be <literal>UN_TOKENIZED</literal>. If one
+      wants to search by words in this property and still sort it, one needs to
+      index it twice - once tokenized and once untokenized. @Fields allows one to
+      achieve this goal.</para>
+
+ <example id="example-fields-annotation">
+ <title>Using @Fields to map a property multiple times</title>
+
+ <programlisting>@Entity
+@Indexed(index = "Book" )
+public class Book {
+ <emphasis role="bold">@Fields( {</emphasis>
+ @Field(index = Index.TOKENIZED),
+ @Field(name = "summary_forSort", index = Index.UN_TOKENIZED, store = Store.YES)
+ <emphasis role="bold">} )</emphasis>
+ public String getSummary() {
+ return summary;
+ }
+
+ ...
+}</programlisting>
+ </example>
+
+ <para>In <xref linkend="example-fields-annotation" /> the field
+ <literal>summary</literal> is indexed twice, once as
+ <literal>summary</literal> in a tokenized way, and once as
+ <literal>summary_forSort</literal> in an untokenized way. @Field
+ supports 2 attributes useful when @Fields is used:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>analyzer: defines a @Analyzer annotation per field rather than
+ per property</para>
+ </listitem>
+
+ <listitem>
+ <para>bridge: defines a @FieldBridge annotation per field rather
+ than per property</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>See below for more information about analyzers and field
+ bridges.</para>
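+
+      <para>As a usage sketch (it assumes the <classname>Book</classname> mapping
+      above as well as an existing <literal>fullTextSession</literal> and Lucene
+      <literal>luceneQuery</literal>), the untokenized field can then be used for
+      sorting while the tokenized one is queried:</para>
+
+      <programlisting>org.hibernate.search.FullTextQuery query =
+    fullTextSession.createFullTextQuery( luceneQuery, Book.class );
+// sort on the untokenized field, not on the tokenized "summary" field
+query.setSort( new org.apache.lucene.search.Sort(
+    new org.apache.lucene.search.SortField( "summary_forSort" ) ) );
+List result = query.list();</programlisting>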
+ </section>
+
+ <section id="search-mapping-associated">
+ <title>Embedded and associated objects</title>
+
+ <para>Associated objects as well as embedded objects can be indexed as
+ part of the root entity index. This is useful if you expect to search a
+ given entity based on properties of associated objects. In the following
+ example the aim is to return places where the associated city is Atlanta
+ (In the Lucene query parser language, it would translate into
+ <code>address.city:Atlanta</code>).</para>
+
+ <example>
+ <title>Using @IndexedEmbedded to index associations</title>
+
+ <programlisting>@Entity
+@Indexed
+public class Place {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field( index = Index.TOKENIZED )
+ private String name;
+
+ @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )
+ <emphasis role="bold">@IndexedEmbedded</emphasis>
+ private Address address;
+ ....
+}
+
+@Entity
+public class Address {
+ @Id
+ @GeneratedValue
+ private Long id;
+
+ @Field(index=Index.TOKENIZED)
+ private String street;
+
+ @Field(index=Index.TOKENIZED)
+ private String city;
+
+ <emphasis role="bold">@ContainedIn</emphasis>
+ @OneToMany(mappedBy="address")
+ private Set<Place> places;
+ ...
+}</programlisting>
+ </example>
+
+ <para>In this example, the place fields will be indexed in the
+ <literal>Place</literal> index. The <literal>Place</literal> index
+ documents will also contain the fields <literal>address.id</literal>,
+ <literal>address.street</literal>, and <literal>address.city</literal>
+ which you will be able to query. This is enabled by the
+ <literal>@IndexedEmbedded</literal> annotation.</para>
+
+ <para>Be careful. Because the data is denormalized in the Lucene index
+ when using the <classname>@IndexedEmbedded</classname> technique,
+ Hibernate Search needs to be aware of any change in the
+ <classname>Place</classname> object and any change in the
+ <classname>Address</classname> object to keep the index up to date. To
+      make sure the <classname>Place</classname> Lucene
+      document is updated when its <classname>Address</classname> changes,
+ you need to mark the other side of the bidirectional relationship with
+ <classname>@ContainedIn</classname>.</para>
+
+ <para><literal>@ContainedIn</literal> is only useful on associations
+ pointing to entities as opposed to embedded (collection of)
+ objects.</para>
+
+ <para>Let's make our example a bit more complex:</para>
+
+ <example>
+ <title>Nested usage of <classname>@IndexedEmbedded</classname> and
+ <classname>@ContainedIn</classname></title>
+
+ <programlisting>@Entity
+@Indexed
+public class Place {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field( index = Index.TOKENIZED )
+ private String name;
+
+ @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )
+ <emphasis role="bold">@IndexedEmbedded</emphasis>
+ private Address address;
+ ....
+}
+
+@Entity
+public class Address {
+ @Id
+ @GeneratedValue
+ private Long id;
+
+ @Field(index=Index.TOKENIZED)
+ private String street;
+
+ @Field(index=Index.TOKENIZED)
+ private String city;
+
+ <emphasis role="bold">@IndexedEmbedded(depth = 1, prefix = "ownedBy_")</emphasis>
+ private Owner ownedBy;
+
+ <emphasis role="bold">@ContainedIn</emphasis>
+ @OneToMany(mappedBy="address")
+ private Set<Place> places;
+ ...
+}
+
+@Embeddable
+public class Owner {
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ ...
+}</programlisting>
+ </example>
+
+ <para>Any <literal>@*ToMany, @*ToOne</literal> and
+ <literal>@Embedded</literal> attribute can be annotated with
+ <literal>@IndexedEmbedded</literal>. The attributes of the associated
+ class will then be added to the main entity index. In the previous
+ example, the index will contain the following fields</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>id</para>
+ </listitem>
+
+ <listitem>
+ <para>name</para>
+ </listitem>
+
+ <listitem>
+ <para>address.street</para>
+ </listitem>
+
+ <listitem>
+ <para>address.city</para>
+ </listitem>
+
+ <listitem>
+ <para>address.ownedBy_name</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>The default prefix is <literal>propertyName.</literal>, following
+ the traditional object navigation convention. You can override it using
+ the <literal>prefix</literal> attribute as it is shown on the
+ <literal>ownedBy</literal> property.</para>
+
+ <note>
+ <para>The prefix cannot be set to the empty string.</para>
+ </note>
+
+ <para>The<literal> depth</literal> property is necessary when the object
+ graph contains a cyclic dependency of classes (not instances). For
+ example, if <classname>Owner</classname> points to
+ <classname>Place</classname>. Hibernate Search will stop including
+      indexed embedded attributes after reaching the expected depth (or when
+      the object graph boundaries are reached). A class having a self reference is
+ an example of cyclic dependency. In our example, because
+ <literal>depth</literal> is set to 1, any
+ <literal>@IndexedEmbedded</literal> attribute in Owner (if any) will be
+ ignored.</para>
+
+ <para>Using <literal>@IndexedEmbedded</literal> for object associations
+ allows you to express queries such as:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Return places where name contains JBoss and where address city
+ is Atlanta. In Lucene query this would be</para>
+
+ <programlisting>+name:jboss +address.city:atlanta </programlisting>
+ </listitem>
+
+ <listitem>
+ <para>Return places where name contains JBoss and where owner's name
+          contains Joe. In Lucene query this would be</para>
+
+          <programlisting>+name:jboss +address.ownedBy_name:joe </programlisting>
+ </listitem>
+ </itemizedlist>
+
+ <para>In a way it mimics the relational join operation in a more
+ efficient way (at the cost of data duplication). Remember that, out of
+ the box, Lucene indexes have no notion of association, the join
+ operation is simply non-existent. It might help to keep the relational
+ model normalized while benefiting from the full text index speed and
+ feature richness.</para>
+
+ <para><note>
+ <para>An associated object can itself (but does not have to) be
+ <literal>@Indexed</literal></para>
+ </note></para>
+
+ <para>When @IndexedEmbedded points to an entity, the association has to
+      be bidirectional and the other side has to be annotated
+ <literal>@ContainedIn</literal> (as seen in the previous example). If
+ not, Hibernate Search has no way to update the root index when the
+ associated entity is updated (in our example, a <literal>Place</literal>
+ index document has to be updated when the associated
+ <classname>Address</classname> instance is updated).</para>
+
+ <para>Sometimes, the object type annotated by
+ <classname>@IndexedEmbedded</classname> is not the object type targeted
+ by Hibernate and Hibernate Search. This is especially the case when
+ interfaces are used in lieu of their implementation. For this reason you
+ can override the object type targeted by Hibernate Search using the
+ <methodname>targetElement</methodname> parameter.</para>
+
+ <example>
+ <title>Using the <literal>targetElement</literal> property of
+ <classname>@IndexedEmbedded</classname></title>
+
+ <programlisting>@Entity
+@Indexed
+public class Address {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index= Index.TOKENIZED)
+ private String street;
+
+ @IndexedEmbedded(depth = 1, prefix = "ownedBy_", <emphasis role="bold">targetElement = Owner.class</emphasis>)
+ @Target(Owner.class)
+ private Person ownedBy;
+
+
+ ...
+}
+
+@Embeddable
+public class Owner implements Person { ... }</programlisting>
+ </example>
+ </section>
+
+ <section id="section-boost-annotation">
+ <title>Boost factor</title>
+
+ <para>Lucene has the notion of <emphasis>boost factor</emphasis>. It's a
+ way to give more weight to a field or to an indexed element over others
+      during the indexing process. You can use <literal>@Boost</literal> at
+ the @Field, method or class level.</para>
+
+ <example>
+ <title>Using different ways of increasing the weight of an indexed
+ element using a boost factor</title>
+
+ <programlisting>@Entity
+@Indexed(index="indexes/essays")
+<emphasis role="bold">@Boost(1.7f)</emphasis>
+public class Essay {
+ ...
+
+ @Id
+ @DocumentId
+ public Long getId() { return id; }
+
+ @Field(name="Abstract", index=Index.TOKENIZED, store=Store.YES, boost=<emphasis
+ role="bold">@Boost(2f)</emphasis>)
+ <emphasis role="bold">@Boost(1.5f)</emphasis>
+ public String getSummary() { return summary; }
+
+ @Lob
+ @Field(index=Index.TOKENIZED, boost=<emphasis role="bold">@Boost(1.2f)</emphasis>)
+ public String getText() { return text; }
+
+ @Field
+ public String getISBN() { return isbn; }
+
+} </programlisting>
+ </example>
+
+ <para>In our example, <classname>Essay</classname>'s probability to
+ reach the top of the search list will be multiplied by 1.7. The
+      <methodname>summary</methodname> field will be 3.0 times (2 * 1.5, as
+ <methodname>@Field.boost</methodname> and <classname>@Boost</classname>
+ on a property are cumulative) more important than the
+ <methodname>isbn</methodname> field. The <methodname>text</methodname>
+ field will be 1.2 times more important than the
+ <methodname>isbn</methodname> field. Note that this explanation in
+ strictest terms is actually wrong, but it is simple and close enough to
+ reality for all practical purposes. Please check the Lucene
+      documentation or the excellent <citetitle>Lucene In Action</citetitle>
+      by Otis Gospodnetic and Erik Hatcher.</para>
+ </section>
+
+ <section>
+ <title id="section-dynamic-boost">Dynamic boost factor</title>
+
+      <para>The <literal>@Boost</literal> annotation used in <xref
+      linkend="section-boost-annotation" /> defines a static boost factor
+      which is independent of the state of the indexed entity at
+      runtime. However, there are use cases in which the boost factor may
+      depend on the actual state of the entity. In this case you can use the
+      <literal>@DynamicBoost</literal> annotation together with an
+ accompanying custom <classname>BoostStrategy</classname>.<example
+ id="example-dynamic-boosting">
+          <title>Dynamic boost example</title>
+
+ <programlisting>public enum PersonType {
+ NORMAL,
+ VIP
+}
+
+@Entity
+@Indexed
+<emphasis role="bold">@DynamicBoost(impl = VIPBoostStrategy.class)</emphasis>
+public class Person {
+ private PersonType type;
+
+ // ....
+}
+
+public class VIPBoostStrategy implements BoostStrategy {
+ <emphasis role="bold">public float defineBoost(Object value)</emphasis> {
+ Person person = ( Person ) value;
+ if ( person.getType().equals( PersonType.VIP ) ) {
+ return 2.0f;
+ }
+ else {
+ return 1.0f;
+ }
+ }
+}</programlisting>
+ </example>In <xref linkend="example-dynamic-boosting" /> a dynamic
+ boost is defined on class level specifying
+ <classname>VIPBoostStrategy</classname> as implementation of the
+ <classname>BoostStrategy</classname> interface to be used at indexing
+ time. You can place the <literal>@DynamicBoost</literal> either at class
+ or field level. Depending on the placement of the annotation either the
+ whole entity is passed to the <methodname>defineBoost</methodname>
+ method or just the annotated field/property value. It's up to you to
+ cast the passed object to the correct type. In the example all indexed
+      values of a VIP person would be twice as important as the values of a
+ normal person.<note>
+ <para>The specified <classname>BoostStrategy</classname>
+ implementation must define a public no-arg constructor.</para>
+ </note>Of course you can mix and match <literal>@Boost</literal> and
+ <literal>@DynamicBoost</literal> annotations in your entity. All defined
+      boost factors are cumulative as described in <xref
+ linkend="section-boost-annotation" />.</para>
+ </section>
+
+ <section id="analyzer">
+ <title>Analyzer</title>
+
+ <para>The default analyzer class used to index tokenized fields is
+ configurable through the <literal>hibernate.search.analyzer</literal>
+ property. The default value for this property is
+ <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>.</para>
+
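+      <para>As a configuration sketch (the value shown simply repeats the
+      default analyzer for illustration; any <classname>Analyzer</classname>
+      implementation can be used instead), the property can be set in your
+      <filename>hibernate.properties</filename> file:</para>
+
+      <programlisting>hibernate.search.analyzer = org.apache.lucene.analysis.standard.StandardAnalyzer</programlisting>
+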
+ <para>You can also define the analyzer class per entity, property and
+ even per @Field (useful when multiple fields are indexed from a single
+ property).</para>
+
+ <example>
+ <title>Different ways of specifying an analyzer</title>
+
+ <programlisting>@Entity
+@Indexed
+<emphasis role="bold">@Analyzer(impl = EntityAnalyzer.class)</emphasis>
+public class MyEntity {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+ @Field(index = Index.TOKENIZED)
+ <emphasis role="bold">@Analyzer(impl = PropertyAnalyzer.class)</emphasis>
+ private String summary;
+
+  @Field(index = Index.TOKENIZED, <emphasis role="bold">analyzer = @Analyzer(impl = FieldAnalyzer.class)</emphasis>)
+ private String body;
+
+ ...
+}</programlisting>
+ </example>
+
+ <para>In this example, <classname>EntityAnalyzer</classname> is used to
+ index all tokenized properties (eg. <literal>name</literal>), except
+ <literal>summary</literal> and <literal>body</literal> which are indexed
+ with <classname>PropertyAnalyzer</classname> and
+ <classname>FieldAnalyzer</classname> respectively.</para>
+
+ <caution>
+ <para>Mixing different analyzers in the same entity is most of the
+ time a bad practice. It makes query building more complex and results
+ less predictable (for the novice), especially if you are using a
+ QueryParser (which uses the same analyzer for the whole query). As a
+ rule of thumb, for any given field the same analyzer should be used
+ for indexing and querying.</para>
+ </caution>
+
+ <section>
+ <title>Analyzer definitions</title>
+
+        <para>Analyzers can become quite complex to deal with. For this reason
+        Hibernate Search introduces the notion of analyzer definitions. An
+ analyzer definition can be reused by many
+ <classname>@Analyzer</classname> declarations. An analyzer definition
+ is composed of:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>a name: the unique string used to refer to the
+ definition</para>
+ </listitem>
+
+ <listitem>
+ <para>a tokenizer: responsible for tokenizing the input stream
+ into individual words</para>
+ </listitem>
+
+ <listitem>
+            <para>a list of filters: each filter is responsible for removing,
+            modifying or sometimes even adding words to the stream provided by the
+ tokenizer</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>This separation of tasks - a tokenizer followed by a list of
+        filters - allows for easy reuse of each individual component and lets
+        you build your customized analyzer in a very flexible way (just like
+        Lego). Generally speaking the <classname>Tokenizer</classname> starts
+        the analysis process by turning the character input into tokens which
+        are then further processed by the <classname>TokenFilter</classname>s.
+        Hibernate Search supports this infrastructure by utilizing the Solr
+        analyzer framework. Make sure to add <filename>solr-core.jar</filename> and
+        <filename>solr-common.jar</filename> to your classpath to
+        use analyzer definitions. In case you also want to use a
+        snowball stemmer, also include
+        <filename>lucene-snowball.jar</filename>. Other Solr analyzers might
+ depend on more libraries. For example, the
+ <classname>PhoneticFilterFactory</classname> depends on <ulink
+ url="http://commons.apache.org/codec">commons-codec</ulink>. Your
+ distribution of Hibernate Search provides these dependencies in its
+ <filename>lib</filename> directory.</para>
+
+ <example>
+ <title><classname>@AnalyzerDef</classname> and the Solr
+ framework</title>
+
+ <programlisting>@AnalyzerDef(name="customanalyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = ISOLatin1AccentFilterFactory.class),
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = StopFilterFactory.class, params = {
+ @Parameter(name="words", value= "org/hibernate/search/test/analyzer/solr/stoplist.properties" ),
+ @Parameter(name="ignoreCase", value="true")
+ })
+})
+public class Team {
+ ...
+}</programlisting>
+ </example>
+
+ <para>A tokenizer is defined by its factory which is responsible for
+ building the tokenizer and using the optional list of parameters. This
+        example uses the standard tokenizer. A filter is defined by its factory
+ which is responsible for creating the filter instance using the
+ optional parameters. In our example, the StopFilter filter is built
+ reading the dedicated words property file and is expected to ignore
+ case. The list of parameters is dependent on the tokenizer or filter
+ factory.</para>
+
+ <warning>
+ <para>Filters are applied in the order they are defined in the
+ <classname>@AnalyzerDef</classname> annotation. Make sure to think
+ twice about this order.</para>
+ </warning>
+
+ <para>Once defined, an analyzer definition can be reused by an
+ <classname>@Analyzer</classname> declaration using the definition name
+ rather than declaring an implementation class.</para>
+
+ <example>
+ <title>Referencing an analyzer by name</title>
+
+ <programlisting>@Entity
+@Indexed
+@AnalyzerDef(name="customanalyzer", ... )
+public class Team {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String name;
+
+ @Field
+ private String location;
+
+ @Field <emphasis role="bold">@Analyzer(definition = "customanalyzer")</emphasis>
+ private String description;
+}</programlisting>
+ </example>
+
+ <para>Analyzer instances declared by
+ <classname>@AnalyzerDef</classname> are available by their name in the
+ <classname>SearchFactory</classname>.</para>
+
+ <programlisting>Analyzer analyzer = fullTextSession.getSearchFactory().getAnalyzer("customanalyzer");</programlisting>
+
+        <para>This is quite useful when building queries. Fields in queries
+ should be analyzed with the same analyzer used to index the field so
+ that they speak a common "language": the same tokens are reused
+ between the query and the indexing process. This rule has some
+ exceptions but is true most of the time. Respect it unless you know
+ what you are doing.</para>
+ </section>
+
+ <section>
+ <title>Available analyzers</title>
+
+ <para>Solr and Lucene come with a lot of useful default tokenizers and
+ filters. You can find a complete list of tokenizer factories and
+ filter factories at <ulink
+ url="http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters">http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters</ulink>.
+        Let's check a few of them.</para>
+
+ <table>
+ <title>Some of the available tokenizers</title>
+
+ <tgroup cols="3">
+ <thead>
+ <row>
+ <entry align="center">Factory</entry>
+
+ <entry align="center">Description</entry>
+
+ <entry align="center">parameters</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry>StandardTokenizerFactory</entry>
+
+ <entry>Use the Lucene StandardTokenizer</entry>
+
+ <entry>none</entry>
+ </row>
+
+ <row>
+ <entry>HTMLStripStandardTokenizerFactory</entry>
+
+ <entry>Remove HTML tags, keep the text and pass it to a
+ StandardTokenizer</entry>
+
+ <entry>none</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ <table>
+ <title>Some of the available filters</title>
+
+ <tgroup cols="3">
+ <thead>
+ <row>
+ <entry align="center">Factory</entry>
+
+ <entry align="center">Description</entry>
+
+ <entry align="center">parameters</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry>StandardFilterFactory</entry>
+
+ <entry>Remove dots from acronyms and 's from words</entry>
+
+ <entry>none</entry>
+ </row>
+
+ <row>
+ <entry>LowerCaseFilterFactory</entry>
+
+ <entry>Lowercase words</entry>
+
+ <entry>none</entry>
+ </row>
+
+ <row>
+ <entry>StopFilterFactory</entry>
+
+                <entry>Remove words (tokens) matching a list of stop
+                words</entry>
+
+                <entry><para><literal>words</literal>: points to a resource
+                file containing the stop words</para><para><literal>ignoreCase</literal>:
+                <literal>true</literal> if case should be ignored when comparing stop
+                words, <literal>false</literal> otherwise </para></entry>
+ </row>
+
+ <row>
+ <entry>SnowballPorterFilterFactory</entry>
+
+                <entry>Reduces a word to its root in a given language (e.g.
+                protect, protects and protection share the same root). Using such
+                a filter allows searches to match related words.</entry>
+
+ <entry><literal>language</literal>: Danish, Dutch, English,
+ Finnish, French, German, Italian, Norwegian, Portuguese,
+ Russian, Spanish, Swedish and a few more</entry>
+ </row>
+
+ <row>
+ <entry>ISOLatin1AccentFilterFactory</entry>
+
+                <entry>Remove accents for languages like French</entry>
+
+ <entry>none</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+        <para>We recommend checking all the implementations of
+        <classname>org.apache.solr.analysis.TokenizerFactory</classname> and
+        <classname>org.apache.solr.analysis.TokenFilterFactory</classname> in
+        your IDE to see which implementations are available.</para>
+ </section>
+
+ <section>
+ <title>Analyzer discriminator (experimental)</title>
+
+ <para>So far all the introduced ways to specify an analyzer were
+ static. However, there are use cases where it is useful to select an
+ analyzer depending on the current state of the entity to be indexed,
+ for example in multilingual applications. For an
+        for example in multilingual applications. For a
+        <classname>BlogEntry</classname> class, for example, the analyzer could
+ property the correct language specific stemmer should be chosen to
+ index the actual text.</para>
+
+ <para>To enable this dynamic analyzer selection Hibernate Search
+ introduces the <classname>AnalyzerDiscriminator</classname>
+ annotation. The following example demonstrates the usage of this
+ annotation:</para>
+
+ <para><example>
+ <title>Usage of @AnalyzerDiscriminator in order to select an
+ analyzer depending on the entity state</title>
+
+ <programlisting>@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class
+ )
+ }),
+ @AnalyzerDef(name = "de",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = GermanStemFilterFactory.class)
+ })
+})
+public class BlogEntry {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field
+ @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
+ private String language;
+
+ @Field
+ private String text;
+
+ private Set<BlogEntry> references;
+
+ // standard getter/setter
+ ...
+}</programlisting>
+
+ <programlisting>public class LanguageDiscriminator implements Discriminator {
+
+ public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
+    if ( value == null || !( entity instanceof BlogEntry ) ) {
+ return null;
+ }
+ return (String) value;
+ }
+}</programlisting>
+ </example></para>
+
+ <para>The prerequisite for using
+ <classname>@AnalyzerDiscriminator</classname> is that all analyzers
+ which are going to be used are predefined via
+ <classname>@AnalyzerDef</classname> definitions. If this is the case
+ one can place the <classname>@AnalyzerDiscriminator</classname>
+ annotation either on the class or on a specific property of the entity
+ for which to dynamically select an analyzer. Via the
+ <literal>impl</literal> parameter of the
+ <classname>AnalyzerDiscriminator</classname> you specify a concrete
+ implementation of the <classname>Discriminator</classname> interface.
+ It is up to you to provide an implementation for this interface. The
+ only method you have to implement is
+ <classname>getAnalyzerDefinitionName()</classname> which gets called
+ for each field added to the Lucene document. The entity which is
+ getting indexed is also passed to the interface method. The
+ <literal>value</literal> parameter is only set if the
+ <classname>AnalyzerDiscriminator</classname> is placed on property
+ level instead of class level. In this case the value represents the
+ current value of this property.</para>
+
+        <para>An implementation of the <classname>Discriminator</classname>
+ interface has to return the name of an existing analyzer definition if
+ the analyzer should be set dynamically or <classname>null</classname>
+ if the default analyzer should not be overridden. The given example
+ assumes that the language parameter is either 'de' or 'en' which
+ matches the specified names in the
+ <classname>@AnalyzerDef</classname>s.</para>
+
+ <note>
+ <para>The <classname>@AnalyzerDiscriminator</classname> is currently
+ still experimental and the API might still change. We are hoping for
+ some feedback from the community about the usefulness and usability
+ of this feature.</para>
+ </note>
+ </section>
+
+ <section id="analyzer-retrievinganalyzer">
+ <title>Retrieving an analyzer</title>
+
+ <para>During indexing time, Hibernate Search is using analyzers under
+ the hood for you. In some situations, retrieving analyzers can be
+ handy. If your domain model makes use of multiple analyzers (maybe to
+ benefit from stemming, use phonetic approximation and so on), you need
+ to make sure to use the same analyzers when you build your
+ query.</para>
+
+ <note>
+ <para>This rule can be broken but you need a good reason for it. If
+ you are unsure, use the same analyzers.</para>
+ </note>
+
+ <para>You can retrieve the scoped analyzer for a given entity used at
+ indexing time by Hibernate Search. A scoped analyzer is an analyzer
+ which applies the right analyzers depending on the field indexed:
+        multiple analyzers can be defined on a given entity, each one working
+        on an individual field; a scoped analyzer unifies all these analyzers
+        into a context-aware analyzer. While the theory seems a bit complex,
+ using the right analyzer in a query is very easy.</para>
+
+ <example>
+ <title>Using the scoped analyzer when building a full-text
+ query</title>
+
+ <programlisting>org.apache.lucene.queryParser.QueryParser parser = new QueryParser(
+ "title",
+ fullTextSession.getSearchFactory().getAnalyzer( Song.class )
+);
+
+org.apache.lucene.search.Query luceneQuery =
+    parser.parse( "title:sky OR title_stemmed:diamond" );
+
+org.hibernate.Query fullTextQuery =
+ fullTextSession.createFullTextQuery( luceneQuery, Song.class );
+
+List result = fullTextQuery.list(); //return a list of managed objects </programlisting>
+ </example>
+
+ <para>In the example above, the song title is indexed in two fields:
+ the standard analyzer is used in the field <literal>title</literal>
+ and a stemming analyzer is used in the field
+ <literal>title_stemmed</literal>. By using the analyzer provided by
+ the search factory, the query uses the appropriate analyzer depending
+ on the field targeted.</para>
+
+        <para>If your query targets more than one entity and you wish to use
+ your standard analyzer, make sure to describe it using an analyzer
+ definition. You can retrieve analyzers by their definition name using
+ <code>searchFactory.getAnalyzer(String)</code>.</para>
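+
+        <para>A short sketch of this approach (it assumes the "customanalyzer"
+        definition and the <literal>description</literal> field of the
+        <classname>Team</classname> example shown earlier):</para>
+
+        <programlisting>// retrieve the analyzer by the name given in @AnalyzerDef
+Analyzer analyzer = fullTextSession.getSearchFactory().getAnalyzer( "customanalyzer" );
+
+// use the same analyzer for querying that was used for indexing
+org.apache.lucene.queryParser.QueryParser parser =
+    new QueryParser( "description", analyzer );
+org.apache.lucene.search.Query luceneQuery = parser.parse( "refactoring" );</programlisting>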
+ </section>
+ </section>
+ </section>
+
+ <section id="search-mapping-bridge">
+ <title>Property/Field Bridge</title>
+
+ <para>In Lucene all index fields have to be represented as Strings. For
+ this reason all entity properties annotated with <literal>@Field</literal>
+ have to be indexed in a String form. For most of your properties,
+ Hibernate Search does the translation job for you thanks to a built-in set
+ of bridges. In some cases, though you need a more fine grain control over
+ of bridges. In some cases, though, you need more fine grained control over
+
+ <section>
+ <title>Built-in bridges</title>
+
+ <para>Hibernate Search comes bundled with a set of built-in bridges
+ between a Java property type and its full text representation.</para>
+
+ <variablelist>
+ <varlistentry>
+ <term>null</term>
+
+ <listitem>
+ <para>null elements are not indexed. Lucene does not support null
+ elements and this does not make much sense either.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.lang.String</term>
+
+ <listitem>
+          <para>Strings are indexed as is</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+        <term>short, Short, int, Integer, long, Long, float, Float,
+ double, Double, BigInteger, BigDecimal</term>
+
+ <listitem>
+          <para>Numbers are converted into their String representation. Note
+          that numbers cannot be compared by Lucene (i.e. used in range
+ queries) out of the box: they have to be padded <note>
+              <para>Using a Range query is debatable and has drawbacks; an
+              alternative approach is to use a Filter query which will
+              filter the query results to the appropriate range.</para>
+
+ <para>Hibernate Search will support a padding mechanism</para>
+ </note></para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.util.Date</term>
+
+ <listitem>
+ <para>Dates are stored as yyyyMMddHHmmssSSS in GMT time
+ (200611072203012 for Nov 7th of 2006 4:03PM and 12ms EST). You
+ shouldn't really bother with the internal format. What is
+ important is that when using a DateRange Query, you should know
+ that the dates have to be expressed in GMT time.</para>
+
+ <para>Usually, storing the date up to the millisecond is not
+ necessary. <literal>@DateBridge</literal> defines the appropriate
+          resolution you are willing to store in the index
+          (<literal>@DateBridge(resolution=Resolution.DAY)</literal>).
+          The date pattern will then be truncated
+ accordingly.</para>
+
+ <programlisting>@Entity
+@Indexed
+public class Meeting {
+ @Field(index=Index.UN_TOKENIZED)
+ <emphasis role="bold">@DateBridge(resolution=Resolution.MINUTE)</emphasis>
+ private Date date;
+ ... </programlisting>
+
+ <warning>
+ <para>A Date whose resolution is lower than
+ <literal>MILLISECOND</literal> cannot be a
+ <literal>@DocumentId</literal></para>
+ </warning>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.net.URI, java.net.URL</term>
+
+ <listitem>
+ <para>URI and URL are converted to their string
+ representation</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>java.lang.Class</term>
+
+ <listitem>
+          <para>Classes are converted to their fully qualified class name. The
+ thread context classloader is used when the class is
+ rehydrated</para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+ </section>
+
+ <section>
+ <title>Custom Bridge</title>
+
+ <para>Sometimes, the built-in bridges of Hibernate Search do not cover
+ some of your property types, or the String representation used by the
+ bridge does not meet your requirements. The following paragraphs
+ describe several solutions to this problem.</para>
+
+ <section>
+ <title>StringBridge</title>
+
+ <para>The simplest custom solution is to give Hibernate Search an
+ implementation of your expected
+ <emphasis><classname>Object</classname> </emphasis>to
+        <classname>String</classname> bridge. To do so you need to implement
+ the <literal>org.hibernate.search.bridge.StringBridge</literal>
+ interface. All implementations have to be thread-safe as they are used
+ concurrently.</para>
+
+ <example>
+ <title>Implementing your own
+ <classname>StringBridge</classname></title>
+
+ <programlisting>/**
+ * Padding Integer bridge.
+ * All numbers will be padded with 0 to match 5 digits
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements <emphasis role="bold">StringBridge</emphasis> {
+
+ private int PADDING = 5;
+
+ <emphasis role="bold">public String objectToString(Object object)</emphasis> {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > PADDING)
+ throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < PADDING ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+} </programlisting>
+ </example>
+
+ <para>Then any property or field can use this bridge thanks to the
+ <literal>@FieldBridge</literal> annotation</para>
+
+ <programlisting><emphasis role="bold">@FieldBridge(impl = PaddedIntegerBridge.class)</emphasis>
+private Integer length; </programlisting>
+
+        <para>Parameters can be passed to the bridge implementation, making it
+        more flexible. To do so, the bridge implementation implements the
+ <classname>ParameterizedBridge</classname> interface, and the
+ parameters are passed through the <literal>@FieldBridge</literal>
+ annotation.</para>
+
+ <example>
+ <title>Passing parameters to your bridge implementation</title>
+
+ <programlisting>public class PaddedIntegerBridge implements StringBridge, <emphasis
+ role="bold">ParameterizedBridge</emphasis> {
+
+ public static String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+ <emphasis role="bold">public void setParameterValues(Map parameters)</emphasis> {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding)
+ throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+}
+
+
+//property
+@FieldBridge(impl = PaddedIntegerBridge.class,
+ <emphasis role="bold">params = @Parameter(name="padding", value="10")</emphasis>
+ )
+private Integer length; </programlisting>
+ </example>
+
+ <para>The <classname>ParameterizedBridge</classname> interface can be
+ implemented by <classname>StringBridge</classname>,
+ <classname>TwoWayStringBridge</classname>,
+ <classname>FieldBridge</classname> implementations.</para>
+
+ <para>All implementations have to be thread-safe, but the parameters
+ are set during initialization and no special care is required at this
+ stage.</para>
+
+ <para>If you expect to use your bridge implementation on an id
+ property (ie annotated with <literal>@DocumentId</literal> ), you need
+ to use a slightly extended version of <literal>StringBridge</literal>
+ named <classname>TwoWayStringBridge</classname>. Hibernate Search
+ needs to read the string representation of the identifier and generate
+        the object out of it. There is no difference in the way the
+ <literal>@FieldBridge</literal> annotation is used.</para>
+
+ <example>
+ <title>Implementing a TwoWayStringBridge which can for example be
+ used for id properties</title>
+
+ <programlisting>public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static String PADDING_PROPERTY = "padding";
+ private int padding = 5; //default
+
+ public void setParameterValues(Map parameters) {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = ( (Integer) object ).toString();
+ if (rawInteger.length() > padding)
+ throw new IllegalArgumentException( "Try to pad on a number too big" );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ <emphasis role="bold">public Object stringToObject(String stringValue)</emphasis> {
+ return new Integer(stringValue);
+ }
+}
+
+
+//id property
+@DocumentId
+@FieldBridge(impl = PaddedIntegerBridge.class,
+             params = @Parameter(name="padding", value="10") )
+private Integer id;
+ </programlisting>
+ </example>
+
+ <para>It is critically important for the two-way process to be
+ idempotent (ie object = stringToObject( objectToString( object ) )
+ ).</para>
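+
+        <para>As a minimal sketch, the round trip of the
+        <classname>PaddedIntegerBridge</classname> defined above can be
+        checked as follows:</para>
+
+        <programlisting>PaddedIntegerBridge bridge = new PaddedIntegerBridge();
+Integer original = Integer.valueOf( 42 );
+//objectToString produces "00042", stringToObject parses it back to 42
+assert original.equals( bridge.stringToObject( bridge.objectToString( original ) ) );</programlisting>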
+ </section>
+
+ <section>
+ <title>FieldBridge</title>
+
+ <para>Some use cases require more than a simple object to string
+ translation when mapping a property to a Lucene index. To give you the
+ greatest possible flexibility you can also implement a bridge as a
+ <classname>FieldBridge</classname>. This interface gives you a
+        property value and lets you map it the way you want in your Lucene
+        <classname>Document</classname>. The interface is very similar in its
+        concept to the Hibernate <classname>UserType</classname>s.</para>
+
+ <para>You can for example store a given property in two different
+ document fields:</para>
+
+ <example>
+          <title>Implementing the FieldBridge interface in order to map a given
+          property into multiple document fields</title>
+
+ <programlisting>/**
+ * Store the date in 3 different fields - year, month, day - to ease Range Query per
+ * year, month or day (eg get all the elements of December for the last 5 years).
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ <emphasis role="bold">public void set(String name, Object value, Document document,
+ LuceneOptions luceneOptions)</emphasis> {
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance(GMT);
+ cal.setTime(date);
+ int year = cal.get(Calendar.YEAR);
+ int month = cal.get(Calendar.MONTH) + 1;
+ int day = cal.get(Calendar.DAY_OF_MONTH);
+
+ // set year
+ Field field = new Field(name + ".year", String.valueOf(year),
+ luceneOptions.getStore(), luceneOptions.getIndex(),
+ luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+
+ // set month and pad it if needed
+ field = new Field(name + ".month", month < 10 ? "0" : ""
+ + String.valueOf(month), luceneOptions.getStore(),
+ luceneOptions.getIndex(), luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+
+ // set day and pad it if needed
+ field = new Field(name + ".day", day < 10 ? "0" : ""
+ + String.valueOf(day), luceneOptions.getStore(),
+ luceneOptions.getIndex(), luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+ }
+}
+
+//property
+<emphasis role="bold">@FieldBridge(impl = DateSplitBridge.class)</emphasis>
+private Date date; </programlisting>
+ </example>
+ </section>
+
+ <section>
+ <title>ClassBridge</title>
+
+ <para>It is sometimes useful to combine more than one property of a
+ given entity and index this combination in a specific way into the
+ Lucene index. The <classname>@ClassBridge</classname> and
+      <classname>@ClassBridges</classname> annotations can be defined at the
+ class level (as opposed to the property level). In this case the
+ custom field bridge implementation receives the entity instance as the
+ value parameter instead of a particular property. Though not shown in
+ this example, <classname>@ClassBridge</classname> supports the
+ <methodname>termVector</methodname> attribute discussed in section
+ <xref linkend="basic-mapping" />.</para>
+
+ <example>
+ <title>Implementing a class bridge</title>
+
+ <programlisting>@Entity
+@Indexed
+<emphasis role="bold">@ClassBridge</emphasis>(name="branchnetwork",
+ index=Index.TOKENIZED,
+ store=Store.YES,
+ impl = <emphasis role="bold">CatFieldsClassBridge.class</emphasis>,
+ params = @Parameter( name="sepChar", value=" " ) )
+public class Department {
+ private int id;
+ private String network;
+ private String branchHead;
+ private String branch;
+    private Integer maxEmployees;
+ ...
+}
+
+
+public class CatFieldsClassBridge implements FieldBridge, ParameterizedBridge {
+ private String sepChar;
+
+ public void setParameterValues(Map parameters) {
+ this.sepChar = (String) parameters.get( "sepChar" );
+ }
+
+ <emphasis role="bold">public void set(String name, Object value, Document document, LuceneOptions luceneOptions)</emphasis> {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Department dep = (Department) value;
+ String fieldValue1 = dep.getBranch();
+ if ( fieldValue1 == null ) {
+ fieldValue1 = "";
+ }
+ String fieldValue2 = dep.getNetwork();
+ if ( fieldValue2 == null ) {
+ fieldValue2 = "";
+ }
+ String fieldValue = fieldValue1 + sepChar + fieldValue2;
+ Field field = new Field( name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+}</programlisting>
+ </example>
+
+ <para>In this example, the particular
+ <classname>CatFieldsClassBridge</classname> is applied to the
+      <literal>department</literal> instance; the field bridge then
+      concatenates both branch and network and indexes the
+      concatenation.</para>
+ </section>
+ </section>
+ </section>
+
+ <section id="provided-id">
+ <title>Providing your own id</title>
+
+ <warning>
+ <para>This part of the documentation is a work in progress.</para>
+ </warning>
+
+ <para>You can provide your own id for Hibernate Search if you are
+ extending the internals. You will have to generate a unique value so it
+ can be given to Lucene to be indexed. This will have to be given to
+ Hibernate Search when you create an org.hibernate.search.Work object - the
+ document id is required in the constructor.</para>
+
+ <section id="ProvidedId">
+ <title>The ProvidedId annotation</title>
+
+      <para>Unlike the conventional Hibernate Search API and @DocumentId, this
+      annotation is used on the class and not on a field. You can also provide
+      your own bridge implementation when using this annotation via the
+      bridge() attribute of @ProvidedId. Also, if you annotate a
+      class with @ProvidedId, its subclasses will also get the annotation -
+      although this is not done through java.lang.annotation.@Inherited. Be
+      sure however, <emphasis>not</emphasis> to use this annotation together with
+      @DocumentId as your system will break.</para>
+
+ <example>
+ <title>Providing your own id</title>
+
+ <programlisting>@ProvidedId (bridge = org.my.own.package.MyCustomBridge)
+@Indexed
+public class MyClass{
+ @Field
+ String MyString;
+ ...
+}</programlisting>
+ </example>
+ </section>
+ </section>
+
+ <section>
+ <title>Programmatic API</title>
+
+ <para><warning>
+ <para>This feature is considered experimental. While stable code-wise,
+ the API is subject to change in the future.</para>
+ </warning>Although the recommended approach for mapping indexed entities
+ is to use annotations, it is sometimes more convenient to use a different
+ approach:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>the same entity is mapped differently depending on deployment
+ needs (customization for clients)</para>
+ </listitem>
+
+ <listitem>
+      <para>some automated process requires the dynamic mapping of many
+      entities sharing common traits</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>While it has been a popular demand in the past, the Hibernate team
+ never found the idea of an XML alternative to annotations appealing due to
+  its heavy duplication and lack of code refactoring safety, because it did
+  not cover the whole use case spectrum and because we are in the 21st century
+ :)</para>
+
+  <para>The idea of a programmatic API was much more appealing and has now
+ become a reality. You can programmatically and safely define your mapping
+ using a programmatic API: you define entities and fields as indexable by
+ using mapping classes which effectively mirror the annotation concepts in
+  Hibernate Search. Note that fans of the XML approach can design their own
+ schema and use the programmatic API to create the mapping while parsing
+ the XML stream.</para>
+
+ <para>In order to use the programmatic model you must first construct a
+ <classname>SearchMapping</classname> object. This object is passed to
+ Hibernate Search via a property set to the <classname>Configuration
+ </classname>object. The property key is
+  <literal>hibernate.search.model_mapping</literal> or its type-safe
+ representation <classname>Environment.MODEL_MAPPING</classname>.</para>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+[...]
+configuration.setProperty( Environment.MODEL_MAPPING, mapping );
+
+//or in JPA
+SearchMapping mapping = new SearchMapping();
+[...]
+Map<String,Object> properties = new HashMap<String,Object>(1);
+properties.put( Environment.MODEL_MAPPING, mapping );
+EntityManagerFactory emf = Persistence.createEntityManagerFactory( "userPU", properties );</programlisting>
+
+ <para>The <classname>SearchMapping</classname> is the root object which
+ contains all the necessary indexable entities and fields. From there, the
+ <classname>SearchMapping</classname> object exposes a fluent (and thus
+ intuitive) API to express your mappings: it contextually exposes the
+ relevant mapping options in a type-safe way, just let your IDE
+ autocompletion feature guide you through.</para>
+
+ <para>Today, the programmatic API cannot be used on a class annotated with
+  Hibernate Search annotations; choose one approach or the other. Also note
+ that the same default values apply in annotations and the programmatic
+ API. For example, the <methodname>@Field.name</methodname> is defaulted to
+ the property name and does not have to be set.</para>
+
+ <para>Each core concept of the programmatic API has a corresponding
+  example to depict how the same definition would look using annotations.
+  Therefore seeing the annotation equivalent of the programmatic approach should
+ give you a clear picture of what Hibernate Search will build with the
+ marked entities and associated properties.</para>
+
+ <section>
+ <title>Mapping an entity as indexable</title>
+
+ <para>The first concept of the programmatic API is to define an entity
+ as indexable. Using the annotation approach a user would mark the entity
+ as <classname>@Indexed</classname>, the following example demonstrates
+ how to programmatically achieve this.</para>
+
+ <para><example>
+ <title>Marking an entity indexable</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping.entity(Address.class)
+ .indexed()
+ .indexName("Address_Index"); //optional
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>As you can see you must first create a
+ <classname>SearchMapping</classname> object which is the root object
+ that is then passed to the <classname>Configuration</classname>
+        object as a property. You must declare an entity and if you wish to
+        make that entity indexable then you must call the
+ <methodname>indexed()</methodname> method. The <methodname>indexed()
+ </methodname>method has an optional <methodname>indexName(String
+ indexName)</methodname> which can be used to change the default
+ index name that is created by Hibernate Search. Using the annotation
+ model the above can be achieved as:</para>
+
+ <para><example>
+ <title>Annotation example of indexing entity</title>
+
+ <programlisting>@Entity
+@Indexed(index="Address_Index")
+public class Address {
+....
+}</programlisting>
+ </example></para>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Adding DocumentId to indexed entity</title>
+
+ <para>To set a property as a document id:</para>
+
+ <para><example>
+ <title>Enabling document id with programmatic model</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping.entity(Address.class).indexed()
+ .property("addressId", ElementType.FIELD) //field access
+ .documentId()
+ .name("id");
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping);</programlisting>
+
+ <para>The above is equivalent to annotating a property in the entity
+ as <classname>@DocumentId</classname> as seen in the following
+ example:</para>
+
+ <para><example>
+ <title>DocumentId annotation definition</title>
+
+ <programlisting>@Entity
+@Indexed
+public class Address {
+ @Id
+ @GeneratedValue
+ @DocumentId(name="id")
+ private Long addressId;
+
+ ....
+}</programlisting>
+ </example></para>
+ </example>The next section demonstrates how to programmatically define
+ analyzers.</para>
+ </section>
+
+ <section>
+ <title>Defining analyzers</title>
+
+ <para>Analyzers can be programmatically defined using the
+ <methodname>analyzerDef(String analyzerDef, Class<? extends
+ TokenizerFactory> tokenizerFactory) </methodname>method. This method
+ also enables you to define filters for the analyzer definition. Each
+ filter that you define can optionally take in parameters as seen in the
+    following example:</para>
+
+ <para><example>
+ <title>Defining analyzers using programmatic model</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ <emphasis role="bold">.analyzerDef( "ngram", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( NGramFilterFactory.class )
+ .param( "minGramSize", "3" )
+ .param( "maxGramSize", "3" )
+ .analyzerDef( "en", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( EnglishPorterFilterFactory.class )
+ .analyzerDef( "de", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( GermanStemFilterFactory.class )</emphasis>
+ .entity(Address.class).indexed()
+ .property("addressId", ElementType.METHOD) //getter access
+ .documentId()
+ .name("id");
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>The analyzer mapping defined above is equivalent to the
+ annotation model using <classname>@AnalyzerDef</classname> in
+ conjunction with <classname>@AnalyzerDefs</classname>:</para>
+ </example><example>
+ <title>Analyzer definition using annotation</title>
+
+ <programlisting>@Indexed
+@Entity
+@AnalyzerDefs({
+ @AnalyzerDef(name = "ngram",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = NGramFilterFactory.class,
+ params = {
+ @Parameter(name = "minGramSize",value = "3"),
+ @Parameter(name = "maxGramSize",value = "3")
+ })
+ }),
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "de",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = GermanStemFilterFactory.class)
+ })
+
+})
+public class Address {
+...
+}</programlisting>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Defining full text filter definitions</title>
+
+    <para>The programmatic API provides an easy mechanism for defining full
+    text filter definitions, which are otherwise declared via
+ <classname>@FullTextFilterDef</classname> and
+ <classname>@FullTextFilterDefs</classname>. Note that contrary to the
+ annotation equivalent, full text filter definitions are a global
+ construct and are not tied to an entity. The next example depicts the
+ creation of full text filter definition using the
+ <methodname>fullTextFilterDef </methodname>method.</para>
+
+ <para><example>
+ <title>Defining full text definition programmatically</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .analyzerDef( "en", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( EnglishPorterFilterFactory.class )
+ <emphasis role="bold">.fullTextFilterDef("security", SecurityFilterFactory.class)
+ .cache(FilterCacheModeType.INSTANCE_ONLY)</emphasis>
+ .entity(Address.class)
+ .indexed()
+ .property("addressId", ElementType.METHOD)
+ .documentId()
+ .name("id")
+ .property("street1", ElementType.METHOD)
+ .field()
+ .analyzer("en")
+ .store(Store.YES)
+ .field()
+ .name("address_data")
+ .analyzer("en")
+ .store(Store.NO);
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+        <para>The previous example can effectively be seen as annotating
+ your entity with <classname>@FullTextFilterDef</classname> like
+ below:</para>
+ </example><example>
+ <title>Using annotation to define full text filter
+ definition</title>
+
+ <programlisting>@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class)
+ })
+})
+@FullTextFilterDefs({
+ @FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class, cache = FilterCacheModeType.INSTANCE_ONLY)
+})
+public class Address {
+
+ @Id
+ @GeneratedValue
+ @DocumentId(name="id")
+ public Long getAddressId() {...};
+
+ @Fields({
+ @Field(index=Index.TOKENIZED, store=Store.YES,
+ analyzer=@Analyzer(definition="en")),
+ @Field(name="address_data", analyzer=@Analyzer(definition="en"))
+ })
+ public String getAddress1() {...};
+
+ ......
+
+}</programlisting>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Defining fields for indexing</title>
+
+ <para>When defining fields for indexing using the programmatic API, call
+ <methodname>field()</methodname> on the <methodname>property(String
+ propertyName, ElementType elementType)</methodname> method. From
+ <methodname>field()</methodname> you can specify the <methodname>name,
+ index</methodname>, <methodname>store</methodname>,
+ <methodname>bridge</methodname> and <methodname>analyzer</methodname>
+ definitions.</para>
+
+ <para><example>
+ <title>Indexing fields using programmatic API</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .analyzerDef( "en", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( EnglishPorterFilterFactory.class )
+ .entity(Address.class).indexed()
+ .property("addressId", ElementType.METHOD)
+ .documentId()
+ .name("id")
+ .property("street1", ElementType.METHOD)
+ <emphasis role="bold">.field()
+ .analyzer("en")
+ .store(Store.YES)
+             .index(Index.TOKENIZED) //not useful here as it is the default
+ .field()
+ .name("address_data")
+ .analyzer("en");</emphasis>
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>The above example of marking fields as indexable is equivalent
+ to defining fields using <classname>@Field</classname> as seen
+ below:</para>
+ </example><example>
+ <title>Indexing fields using annotation</title>
+
+ <programlisting>@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class)
+ })
+})
+public class Address {
+
+ @Id
+ @GeneratedValue
+ @DocumentId(name="id")
+ public Long getAddressId() {...};
+
+ @Fields({
+ @Field(index=Index.TOKENIZED, store=Store.YES,
+ analyzer=@Analyzer(definition="en")),
+ @Field(name="address_data", analyzer=@Analyzer(definition="en"))
+ })
+ public String getAddress1() {...}
+
+ ......
+}</programlisting>
+
+ <para></para>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Programmatically defining embedded entities</title>
+
+    <para>In this section you will see how to programmatically define
+    entities to be embedded into the indexed entity, similar to using the
+    <classname>@IndexedEmbedded</classname> model. In order to define this you
+    must mark the property as <methodname>indexEmbedded</methodname>. There is
+    also the option to add a prefix to the embedded entity definition, which
+    can be done by calling <methodname>prefix</methodname> as seen in the
+    example below:</para>
+
+ <para><example>
+ <title>Programmatically defining embedded entites</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(ProductCatalog.class)
+ .indexed()
+ .property("catalogId", ElementType.METHOD)
+ .documentId()
+ .name("id")
+ .property("title", ElementType.METHOD)
+ .field()
+ .index(Index.TOKENIZED)
+ .store(Store.NO)
+ .property("description", ElementType.METHOD)
+ .field()
+ .index(Index.TOKENIZED)
+ .store(Store.NO)
+ .property("items", ElementType.METHOD)
+ <emphasis role="bold">.indexEmbedded()
+ .prefix("catalog.items"); //optional</emphasis>
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>The next example shows the same definition using annotation
+        (<classname>@IndexedEmbedded</classname>):</para>
+ </example><example>
+        <title>Using @IndexedEmbedded</title>
+
+ <programlisting>@Entity
+@Indexed
+public class ProductCatalog {
+ @Id
+ @GeneratedValue
+ @DocumentId(name="id")
+ public Long getCatalogId() {...}
+
+ @Field(store=Store.NO, index=Index.TOKENIZED)
+ public String getTitle() {...}
+
+ @Field(store=Store.NO, index=Index.TOKENIZED)
+ public String getDescription();
+
+ @OneToMany(fetch = FetchType.LAZY)
+ @IndexColumn(name = "list_position")
+ @Cascade(org.hibernate.annotations.CascadeType.ALL)
+ @IndexedEmbedded(prefix="catalog.items")
+ public List<Item> getItems() {...}
+
+ ...
+
+}</programlisting>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Contained In definition</title>
+
+    <para><classname>@ContainedIn</classname> can be defined as seen in the
+ example below:<example>
+ <title>Programmatically defining ContainedIn</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(ProductCatalog.class)
+ .indexed()
+ .property("catalogId", ElementType.METHOD)
+ .documentId()
+ .property("title", ElementType.METHOD)
+ .field()
+ .property("description", ElementType.METHOD)
+ .field()
+ .property("items", ElementType.METHOD)
+ .indexEmbedded()
+
+ .entity(Item.class)
+ .property("description", ElementType.METHOD)
+ .field()
+ .property("productCatalog", ElementType.METHOD)
+ <emphasis role="bold">.containedIn()</emphasis>;
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>This is equivalent to defining
+ <classname>@ContainedIn</classname> in your entity:</para>
+
+ <para><example>
+ <title>Annotation approach for ContainedIn</title>
+
+ <programlisting>@Entity
+@Indexed
+public class ProductCatalog {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Long getCatalogId() {...}
+
+ @Field
+ public String getTitle() {...}
+
+ @Field
+ public String getDescription() {...}
+
+ @OneToMany(fetch = FetchType.LAZY)
+ @IndexColumn(name = "list_position")
+ @Cascade(org.hibernate.annotations.CascadeType.ALL)
+ @IndexedEmbedded
+ private List<Item> getItems() {...}
+
+ ...
+
+}
+
+
+@Entity
+public class Item {
+
+ @Id
+ @GeneratedValue
+ private Long itemId;
+
+ @Field
+ public String getDescription() {...}
+
+ @ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )
+ @ContainedIn
+ public ProductCatalog getProductCatalog() {...}
+
+ ...
+}</programlisting>
+ </example></para>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Date/Calendar Bridge</title>
+
+ <para>In order to define a calendar or date bridge mapping, call the
+ <methodname>dateBridge(Resolution resolution) </methodname>or
+ <methodname>calendarBridge(Resolution resolution)</methodname> methods
+ after you have defined a <methodname>field()</methodname> in the
+ <classname>SearchMapping </classname>hierarchy.</para>
+
+ <para><example>
+ <title>Programmatic model for defining calendar/date bridge</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(Address.class)
+ .indexed()
+ .property("addressId", ElementType.FIELD)
+ .documentId()
+ .property("street1", ElementType.FIELD()
+ .field()
+ .property("createdOn", ElementType.FIELD)
+ .field()
+ <emphasis role="bold">.dateBridge(Resolution.DAY)</emphasis>
+ .property("lastUpdated", ElementType.FIELD)
+ <emphasis role="bold">.calendarBridge(Resolution.DAY)</emphasis>;
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>See below for defining the above using
+ <classname>@CalendarBridge</classname> and
+ <classname>@DateBridge</classname>:</para>
+ </example><example>
+ <title>@CalendarBridge and @DateBridge definition</title>
+
+ <programlisting>@Entity
+@Indexed
+public class Address {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long addressId;
+
+ @Field
+ private String address1;
+
+ @Field
+ @DateBridge(resolution=Resolution.DAY)
+ private Date createdOn;
+
+ @CalendarBridge(resolution=Resolution.DAY)
+ private Calendar lastUpdated;
+
+ ...
+}</programlisting>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Defining bridges</title>
+
+ <para>It is possible to associate bridges to programmatically defined
+ fields. When you define a <methodname>field()</methodname>
+ programmatically you can use the <methodname>bridge(Class<?>
+ impl)</methodname> to associate a <classname>FieldBridge
+ </classname>implementation class. The bridge method also provides
+ optional methods to include any parameters required for the bridge
+ class. The below shows an example of programmatically defining a
+ bridge:</para>
+
+ <para><example>
+ <title>Defining field bridges programmatically</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(Address.class)
+ .indexed()
+ .property("addressId", ElementType.FIELD)
+ .documentId()
+ .property("street1", ElementType.FIELD)
+ .field()
+ .field()
+ .name("street1_abridged")
+ <emphasis role="bold">.bridge( ConcatStringBridge.class )
+ .param( "size", "4" )</emphasis>;
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>The above can equally be defined using annotations, as seen in
+ the next example.</para>
+
+ <para><example>
+ <title>Defining field bridges using annotation</title>
+
+ <programlisting>@Entity
+@Indexed
+
+public class Address {
+
+ @Id
+ @GeneratedValue
+ @DocumentId(name="id")
+ private Long addressId;
+
+ @Fields({
+ @Field,
+ @Field(name="street1_abridged",
+ bridge= @FieldBridge(impl = ConcatStringBridge.class,
+ params = @Parameter( name="size", value="4" ))
+ })
+ private String address1;
+
+ ...
+}</programlisting>
+ </example></para>
+ </example></para>
+ </section>
+
+ <section>
+ <title>Mapping class bridge</title>
+
+ <para>You can define class bridges on entities programmatically. This is
+ shown in the next example:</para>
+
+ <example>
+      <title>Defining class bridges using the API</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(Departments.class)
+<emphasis> .classBridge(CatDeptsFieldsClassBridge.class)
+ .name("branchnetwork")
+ .index(Index.TOKENIZED)
+ .store(Store.YES)
+ .param("sepChar", " ")
+ .classBridge(EquipmentType.class)
+ .name("equiptype")
+ .index(Index.TOKENIZED)
+ .store(Store.YES)
+ .param("C", "Cisco")
+ .param("D", "D-Link")
+ .param("K", "Kingston")
+ .param("3", "3Com")</emphasis>
+ .indexed();
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );
+
+</programlisting>
+
+ <para>The above is similar to using <classname>@ClassBridge
+ </classname>as seen in the next example:<example>
+ <title>Using @ClassBridge</title>
+
+ <programlisting>@Entity
+@Indexed
+@ClassBridges ( {
+ @ClassBridge(name="branchnetwork",
+ index= Index.TOKENIZED,
+ store= Store.YES,
+ impl = CatDeptsFieldsClassBridge.class,
+ params = @Parameter( name="sepChar", value=" " ) ),
+ @ClassBridge(name="equiptype",
+ index= Index.TOKENIZED,
+ store= Store.YES,
+ impl = EquipmentType.class,
+ params = {@Parameter( name="C", value="Cisco" ),
+ @Parameter( name="D", value="D-Link" ),
+ @Parameter( name="K", value="Kingston" ),
+ @Parameter( name="3", value="3Com" )
+ })
+})
+public class Departments {
+
+....
+
+}</programlisting>
+ </example></para>
+ </example>
+ </section>
+
+ <section>
+ <title>Mapping dynamic boost</title>
+
+ <para>You can apply a dynamic boost
+ factor on either a field or a whole entity: </para>
+
+ <example>
+ <title>DynamicBoost mapping using programmatic model</title>
+
+ <programlisting>SearchMapping mapping = new SearchMapping();
+
+mapping
+ .entity(DynamicBoostedDescLibrary.class)
+ .indexed()
+ <emphasis>.dynamicBoost(CustomBoostStrategy.class)</emphasis>
+ .property("libraryId", ElementType.FIELD)
+ .documentId().name("id")
+ .property("name", ElementType.FIELD)
+        <emphasis>.dynamicBoost(CustomFieldBoostStrategy.class)</emphasis>
+        .field()
+            .store(Store.YES);
+
+cfg.getProperties().put( "hibernate.search.model_mapping", mapping );</programlisting>
+
+ <para>The next example shows the equivalent mapping using the
+ <classname>@DynamicBoost</classname> annotation: <example>
+ <title>Using the @DynamicBoost </title>
+
+ <programlisting>@Entity
+@Indexed
+@DynamicBoost(impl = CustomBoostStrategy.class)
+public class DynamicBoostedDescriptionLibrary {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+
+ private float dynScore;
+
+ @Field(store = Store.YES)
+ @DynamicBoost(impl = CustomFieldBoostStrategy.class)
+ private String name;
+
+ public DynamicBoostedDescriptionLibrary() {
+ dynScore = 1.0f;
+ }
+
+ .......
+
+}</programlisting>
+ </example></para>
+ </example>
+ </section>
+ </section>
+</chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/mapping.xml
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/optimize.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/optimize.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/optimize.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,166 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-optimize">
+ <!-- $Id$ -->
+
+ <title>Index Optimization</title>
+
+ <para>From time to time, the Lucene index needs to be optimized. The process
+ is essentially a defragmentation. Until an optimization is triggered Lucene
+  only marks deleted documents as such; no physical deletions are applied.
+  During the optimization process the deletions will be applied, which also
+  affects the number of files in the Lucene Directory.</para>
+
+ <para>Optimizing the Lucene index speeds up searches but has no effect on
+ the indexation (update) performance. During an optimization, searches can be
+ performed, but will most likely be slowed down. All index updates will be
+ stopped. It is recommended to schedule optimization:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>on an idle system or when the searches are less frequent</para>
+ </listitem>
+
+ <listitem>
+ <para>after a lot of index modifications</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>When using a <classname>MassIndexer</classname> (see
+ <xref linkend="search-batchindex-massindexer"/>) it will optimize involved
+ indexes by default at the start and at the end of processing; you can change
+ this behavior by using respectively
+ <classname>MassIndexer</classname>.<methodname>optimizeAfterPurge</methodname>
+ and <classname>MassIndexer</classname>.<methodname>optimizeOnFinish</methodname>.</para>
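+
+  <para>For example, both flags can be set explicitly when triggering a mass
+  indexing run (a minimal sketch; the boolean values are only
+  illustrative):</para>
+
+  <programlisting>FullTextSession fullTextSession = Search.getFullTextSession( session );
+fullTextSession.createIndexer()
+      .optimizeAfterPurge( true ) //optimize after the purge at the start
+      .optimizeOnFinish( true )   //optimize once indexing is done
+      .startAndWait();</programlisting>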
+
+ <section>
+ <title>Automatic optimization</title>
+
+ <para>Hibernate Search can automatically optimize an index after:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>a certain amount of operations (insertion, deletion)</para>
+ </listitem>
+
+ <listitem>
+ <para>or a certain amount of transactions </para>
+ </listitem>
+ </itemizedlist>
+
+ <para>The configuration for automatic index optimization can be defined on
+ a global level or per index:</para>
+
+ <example>
+ <title>Defining automatic optimization parameters</title>
+
+ <programlisting>hibernate.search.default.optimizer.operation_limit.max = 1000
+hibernate.search.default.optimizer.transaction_limit.max = 100
+hibernate.search.Animal.optimizer.transaction_limit.max = 50</programlisting>
+ </example>
+
+    <para>An optimization will be triggered for the <literal>Animal</literal>
+ index as soon as either:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>the number of additions and deletions reaches 1000</para>
+ </listitem>
+
+ <listitem>
+ <para>the number of transactions reaches 50
+ (<constant>hibernate.search.Animal.optimizer.transaction_limit.max</constant>
+ having priority over
+ <constant>hibernate.search.default.optimizer.transaction_limit.max</constant>)</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>If none of these parameters are defined, no optimization is
+    triggered automatically.</para>
+ </section>
+
+ <section>
+ <title>Manual optimization</title>
+
+ <para>You can programmatically optimize (defragment) a Lucene index from
+ Hibernate Search through the <classname>SearchFactory</classname>:</para>
+
+ <example>
+ <title>Programmatic index optimization</title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession(regularSession);
+SearchFactory searchFactory = fullTextSession.getSearchFactory();
+
+searchFactory.optimize(Order.class);
+// or
+searchFactory.optimize();</programlisting>
+ </example>
+
+ <para>The first example optimizes the Lucene index holding
+    <classname>Order</classname>s; the second optimizes all indexes.</para>
+
+ <note>
+ <para><literal>searchFactory.optimize()</literal> has no effect on a JMS
+ backend. You must apply the optimize operation on the Master
+ node.</para>
+ </note>
+ </section>
+
+ <section>
+ <title>Adjusting optimization</title>
+
+ <para>Apache Lucene has a few parameters to influence how optimization is
+ performed. Hibernate Search exposes those parameters.</para>
+
+ <para>Further index optimization parameters include: <itemizedlist>
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_buffered_docs</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_field_length</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_merge_docs</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].merge_factor</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].ram_buffer_size</literal>
+ </listitem>
+
+ <listitem>
+ <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].term_index_interval</literal>
+ </listitem>
+ </itemizedlist> See <xref linkend="lucene-indexing-performance" /> for
+ more details.</para>
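+
+    <para>For instance, a batch indexing run could be tuned with properties
+    such as the following (a sketch; the values are purely illustrative and
+    need to be adjusted to your data set):</para>
+
+    <programlisting>hibernate.search.default.indexwriter.batch.max_buffered_docs = 1000
+hibernate.search.default.indexwriter.batch.merge_factor = 20
+hibernate.search.Animal.indexwriter.transaction.ram_buffer_size = 64</programlisting>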
+ </section>
+</chapter>
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/optimize.xml
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/docbook/en-US/modules/query.xml
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/en-US/modules/query.xml (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/en-US/modules/query.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,968 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Hibernate, Relational Persistence for Idiomatic Java
+ ~
+ ~ Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ ~ indicated by the @author tags or express copyright attribution
+ ~ statements applied by the authors. All third-party contributions are
+ ~ distributed under license by Red Hat Middleware LLC.
+ ~
+ ~ This copyrighted material is made available to anyone wishing to use, modify,
+ ~ copy, or redistribute it subject to the terms and conditions of the GNU
+ ~ Lesser General Public License, as published by the Free Software Foundation.
+ ~
+ ~ This program is distributed in the hope that it will be useful,
+ ~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ ~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ ~ for more details.
+ ~
+ ~ You should have received a copy of the GNU Lesser General Public License
+ ~ along with this distribution; if not, write to:
+ ~ Free Software Foundation, Inc.
+ ~ 51 Franklin Street, Fifth Floor
+ ~ Boston, MA 02110-1301 USA
+ -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.5//EN"
+"http://www.oasis-open.org/docbook/xml/4.5/docbookx.dtd">
+<chapter id="search-query" xreflabel="Querying">
+ <!-- $Id$ -->
+
+ <title>Querying</title>
+
+ <para>The second most important capability of Hibernate Search is the
+  ability to execute a Lucene query and retrieve entities managed by a
+ Hibernate session, providing the power of Lucene without leaving the
+ Hibernate paradigm, and giving another dimension to the Hibernate classic
+ search mechanisms (HQL, Criteria query, native SQL query). Preparing and
+ executing a query consists of four simple steps:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Creating a <classname>FullTextSession</classname></para>
+ </listitem>
+
+ <listitem>
+ <para>Creating a Lucene query</para>
+ </listitem>
+
+ <listitem>
+ <para>Wrapping the Lucene query using a
+ <classname>org.hibernate.Query</classname></para>
+ </listitem>
+
+ <listitem>
+ <para>Executing the search by calling for example
+ <methodname>list()</methodname> or
+ <methodname>scroll()</methodname></para>
+ </listitem>
+ </itemizedlist>
+
+  <para>To access the querying facilities, you have to use a
+ <classname>FullTextSession</classname>. This Search specific session wraps a
+ regular <classname>org.hibernate.Session</classname> to provide query and
+ indexing capabilities.</para>
+
+ <example>
+ <title>Creating a FullTextSession</title>
+
+ <programlisting>Session session = sessionFactory.openSession();
+...
+FullTextSession fullTextSession = Search.getFullTextSession(session); </programlisting>
+ </example>
+
+ <para>The actual search facility is built on native Lucene queries which the
+ following example illustrates.</para>
+
+ <example>
+ <title>Creating a Lucene query</title>
+
+ <programlisting>org.apache.lucene.queryParser.QueryParser parser =
+ new QueryParser("title", new StopAnalyzer() );
+
+org.apache.lucene.search.Query luceneQuery = parser.parse( "summary:Festina Or brand:Seiko" );
+<emphasis role="bold">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );
+ </emphasis>
+List result = fullTextQuery.list(); //return a list of managed objects </programlisting>
+ </example>
+
+ <para>The Hibernate query built on top of the Lucene query is a regular
+ <literal>org.hibernate.Query</literal>, which means you are in the same
+ paradigm as the other Hibernate query facilities (HQL, Native or Criteria).
+ The regular <literal>list()</literal> , <literal>uniqueResult()</literal>,
+ <literal>iterate()</literal> and <literal>scroll()</literal> methods can be
+ used.</para>
+
+ <para>In case you are using the Java Persistence APIs of Hibernate (aka EJB
+ 3.0 Persistence), the same extensions exist:</para>
+
+ <example>
+ <title>Creating a Search query using the JPA API</title>
+
+ <programlisting>EntityManager em = entityManagerFactory.createEntityManager();
+
+FullTextEntityManager fullTextEntityManager =
+ org.hibernate.search.jpa.Search.getFullTextEntityManager(em);
+
+...
+org.apache.lucene.queryParser.QueryParser parser =
+ new QueryParser("title", new StopAnalyzer() );
+
+org.apache.lucene.search.Query luceneQuery = parser.parse( "summary:Festina Or brand:Seiko" );
+<emphasis role="bold">javax.persistence.Query fullTextQuery = fullTextEntityManager.createFullTextQuery( luceneQuery );</emphasis>
+
+List result = fullTextQuery.getResultList(); //return a list of managed objects </programlisting>
+ </example>
+
+  <para>In the following examples we will use the Hibernate APIs, but the same
+  examples can easily be rewritten with the Java Persistence API by just
+ adjusting the way the <classname>FullTextQuery</classname> is
+ retrieved.</para>
+
+ <section>
+ <title>Building queries</title>
+
+ <para>Hibernate Search queries are built on top of Lucene queries which
+ gives you total freedom on the type of Lucene query you want to execute.
+ However, once built, Hibernate Search wraps further query processing using
+ <classname>org.hibernate.Query</classname> as your primary query
+ manipulation API.</para>
+
+ <section>
+ <title>Building a Lucene query</title>
+
+      <para>It is out of the scope of this documentation to explain how to
+ build a Lucene query. Please refer to the online Lucene documentation or
+ get hold of a copy of either Lucene In Action or Hibernate Search in
+ Action.</para>
+ </section>
+
+ <section>
+ <title>Building a Hibernate Search query</title>
+
+ <section>
+ <title>Generality</title>
+
+        <para>Once the Lucene query is built, it needs to be wrapped into a
+ Hibernate Query.</para>
+
+ <example>
+ <title>Wrapping a Lucene query into a Hibernate Query</title>
+
+ <programlisting>FullTextSession fullTextSession = Search.getFullTextSession( session );
+org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );</programlisting>
+ </example>
+
+ <para>If not specified otherwise, the query will be executed against
+ all indexed entities, potentially returning all types of indexed
+ classes. It is advised, from a performance point of view, to restrict
+ the returned types:</para>
+
+ <example>
+ <title>Filtering the search result by entity type</title>
+
+ <programlisting>org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Customer.class );
+// or
+fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Item.class, Actor.class );</programlisting>
+ </example>
+
+ <para>The first example returns only matching
+ <classname>Customer</classname>s, the second returns matching
+ <classname>Actor</classname>s and <classname>Item</classname>s. The
+ type restriction is fully polymorphic which means that if there are
+ two indexed subclasses <classname>Salesman</classname> and
+ <classname>Customer</classname> of the baseclass
+ <classname>Person</classname>, it is possible to just specify
+ <classname>Person.class</classname> in order to filter on result
+ types.</para>
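+
+        <para>For instance, assuming <classname>Salesman</classname> and
+        <classname>Customer</classname> both extend
+        <classname>Person</classname>, the following sketch restricts the
+        results to any indexed <classname>Person</classname> subtype:</para>
+
+        <programlisting>org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Person.class );
+List results = fullTextQuery.list(); //may contain Salesman and Customer instances</programlisting>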
+ </section>
+
+ <section>
+ <title>Pagination</title>
+
+        <para>For performance reasons it is recommended to restrict the
+        number of returned objects per query. It is in fact a very common use
+        case anyway that the user navigates from one page to another. The way
+ to define pagination is exactly the way you would define pagination in
+ a plain HQL or Criteria query.</para>
+
+ <example>
+ <title>Defining pagination for a search query</title>
+
+ <programlisting>org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Customer.class );
+fullTextQuery.setFirstResult(15); //start from the 15th element
+fullTextQuery.setMaxResults(10); //return 10 elements</programlisting>
+ </example>
+
+ <note>
+ <para>It is still possible to get the total number of matching
+ elements regardless of the pagination via
+ <methodname>fulltextQuery.</methodname><methodname>getResultSize()</methodname></para>
+ </note>
+ </section>
+
+ <section>
+ <title>Sorting</title>
+
+ <para>Apache Lucene provides a very flexible and powerful way to sort
+ results. While the default sorting (by relevance) is appropriate most
+ of the time, it can be interesting to sort by one or several other
+ properties. In order to do so set the Lucene Sort object to apply a
+ Lucene sorting strategy.</para>
+
+ <example>
+ <title>Specifying a Lucene <classname>Sort</classname> in order to
+ sort the results</title>
+
+ <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( query, Book.class );
+org.apache.lucene.search.Sort sort = new Sort(new SortField("title"));
+<emphasis role="bold">query.setSort(sort);</emphasis>
+List results = query.list();</programlisting>
+ </example>
+
+ <para>One can notice the <classname>FullTextQuery</classname>
+        interface, which is a subinterface of
+ <classname>org.hibernate.Query</classname>. Be aware that fields used
+ for sorting must not be tokenized.</para>
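+
+        <para>As a sketch, the <literal>title</literal> property used for
+        sorting above could be mapped to an additional untokenized field (the
+        field name <literal>title_sort</literal> is only illustrative), which
+        the <classname>SortField</classname> would then target:</para>
+
+        <programlisting>@Fields({
+    @Field(index = Index.TOKENIZED),                        //used for full text matching
+    @Field(name = "title_sort", index = Index.UN_TOKENIZED) //used for sorting
+})
+private String title;
+
+//...
+org.apache.lucene.search.Sort sort = new Sort( new SortField( "title_sort" ) );</programlisting>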
+ </section>
+
+ <section>
+ <title>Fetching strategy</title>
+
+ <para>When you restrict the return types to one class, Hibernate
+ Search loads the objects using a single query. It also respects the
+ static fetching strategy defined in your domain model.</para>
+
+ <para>It is often useful, however, to refine the fetching strategy for
+ a specific use case.</para>
+
+ <example>
+ <title>Specifying <classname>FetchMode</classname> on a
+ query</title>
+
+ <programlisting>Criteria criteria = s.createCriteria( Book.class ).setFetchMode( "authors", FetchMode.JOIN );
+s.createFullTextQuery( luceneQuery ).setCriteriaQuery( criteria );</programlisting>
+ </example>
+
+ <para>In this example, the query will return all Books matching the
+ luceneQuery. The authors collection will be loaded from the same query
+ using an SQL outer join.</para>
+
+        <para>When defining a criteria query, it is not necessary to restrict the
+ entity types returned while creating the Hibernate Search query from
+ the full text session: the type is guessed from the criteria query
+        itself. Only the fetch mode can be adjusted; refrain from applying any
+ other restriction.</para>
+
+ <para>One cannot use <methodname>setCriteriaQuery</methodname> if more
+ than one entity type is expected to be returned.</para>
+ </section>
+
+ <section id="projections">
+ <title>Projection</title>
+
+ <para>For some use cases, returning the domain object (graph) is
+ overkill. Only a small subset of the properties is necessary.
+ Hibernate Search allows you to return a subset of properties:</para>
+
+ <example>
+ <title>Using projection instead of returning the full domain
+ object</title>
+
+ <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.<emphasis role="bold">setProjection( "id", "summary", "body", "mainAuthor.name" )</emphasis>;
+List results = query.list();
+Object[] firstResult = (Object[]) results.get(0);
+Integer id = firstResult[0];
+String summary = firstResult[1];
+String body = firstResult[2];
+String authorName = firstResult[3];</programlisting>
+ </example>
+
+ <para>Hibernate Search extracts the properties from the Lucene index
+        and converts them back to their object representation, returning a list
+ of <classname>Object[]</classname>. Projections avoid a potential
+ database round trip (useful if the query response time is critical),
+        but have some constraints:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>the properties projected must be stored in the index
+            (<literal>@Field(store=Store.YES)</literal>), which increases the
+ index size</para>
+ </listitem>
+
+ <listitem>
+ <para>the properties projected must use a
+ <literal>FieldBridge</literal> implementing
+ <classname>org.hibernate.search.bridge.TwoWayFieldBridge</classname>
+ or
+ <literal>org.hibernate.search.bridge.TwoWayStringBridge</literal>,
+ the latter being the simpler version. All Hibernate Search
+ built-in types are two-way.</para>
+ </listitem>
+
+ <listitem>
+ <para>you can only project simple properties of the indexed entity
+ or its embedded associations. This means you cannot project a
+ whole embedded entity.</para>
+ </listitem>
+
+ <listitem>
+ <para>projection does not work on collections or maps which are
+ indexed via <classname>@IndexedEmbedded</classname></para>
+ </listitem>
+ </itemizedlist>
+
+        <para>Projection is useful for another kind of use case. Lucene
+ provides some metadata information to the user about the results. By
+ using some special placeholders, the projection mechanism can retrieve
+ them:</para>
+
+ <example>
+ <title>Using projection in order to retrieve meta data</title>
+
+ <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.<emphasis role="bold">setProjection( FullTextQuery.SCORE, FullTextQuery.THIS, "mainAuthor.name" )</emphasis>;
+List results = query.list();
+Object[] firstResult = (Object[]) results.get(0);
+float score = firstResult[0];
+Book book = firstResult[1];
+String authorName = firstResult[2];</programlisting>
+ </example>
+
+ <para>You can mix and match regular fields and special placeholders.
+ Here is the list of available placeholders:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>FullTextQuery.THIS: returns the initialized and managed
+ entity (as a non projected query would have done).</para>
+ </listitem>
+
+ <listitem>
+ <para>FullTextQuery.DOCUMENT: returns the Lucene Document related
+ to the object projected.</para>
+ </listitem>
+
+ <listitem>
+ <para>FullTextQuery.OBJECT_CLASS: returns the class of the indexed
+ entity.</para>
+ </listitem>
+
+ <listitem>
+ <para>FullTextQuery.SCORE: returns the document score in the
+            query. Scores are handy to compare one result against another for
+            a given query but are useless when comparing the results of
+ different queries.</para>
+ </listitem>
+
+ <listitem>
+ <para>FullTextQuery.ID: the id property value of the projected
+ object.</para>
+ </listitem>
+
+ <listitem>
+            <para>FullTextQuery.DOCUMENT_ID: the Lucene document id. Be
+            careful, the Lucene document id can change over time between two
+            different IndexReader openings (this feature is experimental).</para>
+ </listitem>
+
+ <listitem>
+            <para>FullTextQuery.EXPLANATION: returns the Lucene Explanation
+            object for the matching object/document in the given query. Do not
+            use it if you retrieve a lot of data: running the explanation is
+            typically as costly as running the whole Lucene query once per
+            matching element. Make sure you use projection!</para>
+ </listitem>
+ </itemizedlist>
+ </section>
+ </section>
+ </section>
+
+ <section>
+ <title>Retrieving the results</title>
+
+ <para>Once the Hibernate Search query is built, executing it is in no way
+    different from executing an HQL or Criteria query. The same paradigm and
+    object semantics apply. All the common operations are available:
+ <methodname>list()</methodname>, <methodname>uniqueResult()</methodname>,
+ <methodname>iterate()</methodname>,
+ <methodname>scroll()</methodname>.</para>
+
+ <section>
+ <title>Performance considerations</title>
+
+ <para>If you expect a reasonable number of results (for example using
+ pagination) and expect to work on all of them,
+ <methodname>list()</methodname> or
+ <methodname>uniqueResult()</methodname> are recommended.
+      <methodname>list()</methodname> works best if the entity
+      <literal>batch-size</literal> is set up properly. Note that Hibernate
+      Search has to process all Lucene Hits elements (within the pagination)
+      when using <methodname>list()</methodname>,
+ <methodname>uniqueResult()</methodname> and
+ <methodname>iterate()</methodname>.</para>
+
+ <para>If you wish to minimize Lucene document loading,
+ <methodname>scroll()</methodname> is more appropriate. Don't forget to
+ close the <classname>ScrollableResults</classname> object when you're
+      done, since it holds onto Lucene resources. If you expect to use
+      <methodname>scroll()</methodname> but wish to load objects in batches, you
+      can use <methodname>query.setFetchSize()</methodname>. When an object is
+ accessed, and if not already loaded, Hibernate Search will load the next
+ <literal>fetchSize</literal> objects in one pass.</para>
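+
+      <para>As an illustration, here is a minimal sketch of the scroll pattern
+      described above (it simply combines
+      <methodname>setFetchSize()</methodname>, <methodname>scroll()</methodname>
+      and the standard <classname>ScrollableResults</classname> API; the query
+      is assumed to be built as in the previous examples):</para>
+
+      <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.setFetchSize( 20 ); //load the next 20 objects in one pass
+ScrollableResults scroll = query.scroll();
+try {
+    while ( scroll.next() ) {
+        Book book = (Book) scroll.get( 0 );
+        //work on the current entity
+    }
+}
+finally {
+    scroll.close(); //release the underlying Lucene resources
+}</programlisting>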
+
+      <para>Pagination is preferred over scrolling, though.</para>
+ </section>
+
+ <section>
+ <title>Result size</title>
+
+      <para>It is sometimes useful to know the total number of matching
+ documents:</para>
+
+ <itemizedlist>
+ <listitem>
+          <para>to display a Google-like result count such as "1-10 of about 888,000,000"</para>
+ </listitem>
+
+ <listitem>
+ <para>to implement a fast pagination navigation</para>
+ </listitem>
+
+ <listitem>
+          <para>to implement a multi step search engine (adding approximation
+          if the restricted query returns no or too few results)</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>Of course it would be too costly to retrieve all the matching
+ documents. Hibernate Search allows you to retrieve the total number of
+ matching documents regardless of the pagination parameters. Even more
+ interesting, you can retrieve the number of matching elements without
+ triggering a single object load.</para>
+
+ <example>
+ <title>Determining the result size of a query</title>
+
+ <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+assert 3245 == <emphasis role="bold">query.getResultSize()</emphasis>; //returns the number of matching books without loading a single one
+
+org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.setMaxResults(10);
+List results = query.list();
+assert 3245 == <emphasis role="bold">query.getResultSize()</emphasis>; //returns the total number of matching books regardless of pagination</programlisting>
+ </example>
+
+ <note>
+        <para>Like Google, the number of results is an approximation if the index
+ is not fully up-to-date with the database (asynchronous cluster for
+ example).</para>
+ </note>
+ </section>
+
+ <section>
+ <title>ResultTransformer</title>
+
+ <para>Especially when using projection, the data structure returned by a
+      query (an object array in this case) does not always match the
+      application needs. It is possible to apply a
+ <classname>ResultTransformer</classname> operation post query to match
+ the targeted data structure:</para>
+
+ <example>
+ <title>Using ResultTransformer in conjunction with projections</title>
+
+ <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.setProjection( "title", "mainAuthor.name" );
+
+<emphasis role="bold">query.setResultTransformer(
+ new StaticAliasToBeanResultTransformer( BookView.class, "title", "author" )
+);</emphasis>
+List<BookView> results = (List<BookView>) query.list();
+for(BookView view : results) {
+ log.info( "Book: " + view.getTitle() + ", " + view.getAuthor() );
+}</programlisting>
+ </example>
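+
+      <para><classname>BookView</classname> in the example above is not a
+      Hibernate Search class but a plain application bean. A minimal sketch
+      matching the aliases used above could look like this:</para>
+
+      <programlisting>public class BookView {
+    private String title;
+    private String author;
+
+    public String getTitle() { return title; }
+    public void setTitle(String title) { this.title = title; }
+
+    public String getAuthor() { return author; }
+    public void setAuthor(String author) { this.author = author; }
+}</programlisting>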
+
+ <para>Examples of <classname>ResultTransformer</classname>
+ implementations can be found in the Hibernate Core codebase.</para>
+ </section>
+
+ <section>
+ <title>Understanding results</title>
+
+      <para>You will sometimes find yourself puzzled by a result showing up in
+      a query or by a result not showing up in a query. Luke is a great tool
+      for understanding those mysteries. However, Hibernate Search also gives
+      you access to the Lucene <classname>Explanation</classname> object for a
+      given result (in a given query). This class is considered fairly
+      advanced even for Lucene users but can provide a good understanding of
+      the scoring of an object. You have two ways to access the Explanation
+      object for a given result:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>Use the <methodname>fullTextQuery.explain(int)</methodname>
+ method</para>
+ </listitem>
+
+ <listitem>
+ <para>Use projection</para>
+ </listitem>
+ </itemizedlist>
+
+      <para>The first approach takes a document id as a parameter and returns
+ the Explanation object. The document id can be retrieved using
+ projection and the <literal>FullTextQuery.DOCUMENT_ID</literal>
+ constant.</para>
+
+ <warning>
+        <para>The Document id has nothing to do with the entity id. Do not
+        confuse these two notions.</para>
+ </warning>
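+
+      <para>Here is a minimal sketch of the first approach (the
+      <methodname>display()</methodname> helper is, as in the projection
+      example below, a hypothetical application method):</para>
+
+      <programlisting>FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )
+        .setProjection( FullTextQuery.DOCUMENT_ID );
+Object[] firstResult = (Object[]) ftQuery.list().get( 0 );
+int docId = (Integer) firstResult[0];
+Explanation e = ftQuery.explain( docId );
+display( e.toString() );</programlisting>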
+
+      <para>The second approach lets you project the
+ <classname>Explanation</classname> object using the
+ <literal>FullTextQuery.EXPLANATION</literal> constant.</para>
+
+ <example>
+ <title>Retrieving the Lucene Explanation object using
+ projection</title>
+
+ <programlisting>FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )
+ .setProjection( FullTextQuery.DOCUMENT_ID, <emphasis role="bold">FullTextQuery.EXPLANATION</emphasis>, FullTextQuery.THIS );
+@SuppressWarnings("unchecked") List<Object[]> results = ftQuery.list();
+for (Object[] result : results) {
+ Explanation e = (Explanation) result[1];
+ display( e.toString() );
+}</programlisting>
+ </example>
+
+      <para>Be careful: building the explanation object is quite expensive, it
+      is roughly as expensive as running the Lucene query again. Don't do it
+      if you don't need the object.</para>
+ </section>
+ </section>
+
+ <section id="query-filter">
+ <title>Filters</title>
+
+  <para>Apache Lucene has a powerful feature that allows you to filter query
+ results according to a custom filtering process. This is a very powerful
+ way to apply additional data restrictions, especially since filters can be
+ cached and reused. Some interesting use cases are:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>security</para>
+ </listitem>
+
+ <listitem>
+      <para>temporal data (e.g. view only last month's data)</para>
+ </listitem>
+
+ <listitem>
+      <para>population filter (e.g. search limited to a given
+ category)</para>
+ </listitem>
+
+ <listitem>
+ <para>and many more</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>Hibernate Search pushes the concept further by introducing the
+ notion of parameterizable named filters which are transparently cached.
+ For people familiar with the notion of Hibernate Core filters, the API is
+ very similar:</para>
+
+ <example>
+ <title>Enabling fulltext filters for a given query</title>
+
+ <programlisting>fullTextQuery = s.createFullTextQuery( query, Driver.class );
+fullTextQuery.enableFullTextFilter("bestDriver");
+fullTextQuery.enableFullTextFilter("security").setParameter( "login", "andre" );
+fullTextQuery.list(); //returns only best drivers where andre has credentials</programlisting>
+ </example>
+
+ <para>In this example we enabled two filters on top of the query. You can
+ enable (or disable) as many filters as you like.</para>
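+
+  <para>A filter that has been enabled can also be turned off again on the
+  same query instance. A one-line sketch, assuming the
+  <methodname>disableFullTextFilter()</methodname> counterpart of
+  <methodname>enableFullTextFilter()</methodname>:</para>
+
+  <programlisting>fullTextQuery.disableFullTextFilter("bestDriver"); //the "security" filter stays enabled</programlisting>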
+
+ <para>Declaring filters is done through the
+ <classname>@FullTextFilterDef</classname> annotation. This annotation can
+ be on any <literal>@Indexed</literal> entity regardless of the query the
+ filter is later applied to. This implies that filter definitions are
+ global and their names must be unique. A
+ <classname>SearchException</classname> is thrown in case two different
+ <classname>@FullTextFilterDef</classname> annotations with the same name
+ are defined. Each named filter has to specify its actual filter
+ implementation.</para>
+
+ <example>
+ <title>Defining and implementing a Filter</title>
+
+ <programlisting>@Entity
+@Indexed
+@FullTextFilterDefs( {
+ <emphasis role="bold">@FullTextFilterDef(name = "bestDriver", impl = BestDriversFilter.class)</emphasis>,
+ <emphasis role="bold">@FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class)</emphasis>
+})
+public class Driver { ... }</programlisting>
+
+ <programlisting>public class BestDriversFilter extends <emphasis
+ role="bold">org.apache.lucene.search.Filter</emphasis> {
+
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );
+ TermDocs termDocs = reader.termDocs( new Term( "score", "5" ) );
+ while ( termDocs.next() ) {
+ bitSet.set( termDocs.doc() );
+ }
+ return bitSet;
+ }
+}</programlisting>
+ </example>
+
+ <para><classname>BestDriversFilter</classname> is an example of a simple
+ Lucene filter which reduces the result set to drivers whose score is 5. In
+  this example the specified filter extends
+  <literal>org.apache.lucene.search.Filter</literal> directly and provides a
+  no-arg constructor.</para>
+
+ <para>If your Filter creation requires additional steps or if the filter
+ you want to use does not have a no-arg constructor, you can use the
+ factory pattern:</para>
+
+ <example>
+ <title>Creating a filter using the factory pattern</title>
+
+ <programlisting>@Entity
+@Indexed
+@FullTextFilterDef(name = "bestDriver", impl = BestDriversFilterFactory.class)
+public class Driver { ... }
+
+public class BestDriversFilterFactory {
+
+ <emphasis role="bold">@Factory</emphasis>
+ public Filter getFilter() {
+ //some additional steps to cache the filter results per IndexReader
+ Filter bestDriversFilter = new BestDriversFilter();
+ return new CachingWrapperFilter(bestDriversFilter);
+ }
+}</programlisting>
+ </example>
+
+ <para>Hibernate Search will look for a <literal>@Factory</literal>
+ annotated method and use it to build the filter instance. The factory must
+ have a no-arg constructor. For people familiar with JBoss Seam, this is
+ similar to the component factory pattern, but the annotation is
+ different!</para>
+
+ <para>Named filters come in handy where parameters have to be passed to
+ the filter. For example a security filter might want to know which
+ security level you want to apply:</para>
+
+ <example>
+ <title>Passing parameters to a defined filter</title>
+
+ <programlisting>fullTextQuery = s.createFullTextQuery( query, Driver.class );
+fullTextQuery.enableFullTextFilter("security")<emphasis role="bold">.setParameter( "level", 5 )</emphasis>;</programlisting>
+ </example>
+
+ <para>Each parameter name should have an associated setter on either the
+ filter or filter factory of the targeted named filter definition.</para>
+
+ <example>
+ <title>Using parameters in the actual filter implementation</title>
+
+ <programlisting>public class SecurityFilterFactory {
+ private Integer level;
+
+ /**
+ * injected parameter
+ */
+ <emphasis role="bold">public void setLevel(Integer level)</emphasis> {
+ this.level = level;
+ }
+
+ <emphasis role="bold">@Key
+ public FilterKey getKey()</emphasis> {
+ StandardFilterKey key = new StandardFilterKey();
+ key.addParameter( level );
+ return key;
+ }
+
+ @Factory
+ public Filter getFilter() {
+ Query query = new TermQuery( new Term("level", level.toString() ) );
+ return new CachingWrapperFilter( new QueryWrapperFilter(query) );
+ }
+}</programlisting>
+ </example>
+
+ <para>Note the method annotated <classname>@Key</classname> returning a
+ <classname>FilterKey</classname> object. The returned object has a special
+ contract: the key object must implement <methodname>equals()</methodname>
+ / <methodname>hashCode()</methodname> so that 2 keys are equal if and only
+ if the given <classname>Filter</classname> types are the same and the set
+ of parameters are the same. In other words, 2 filter keys are equal if and
+ only if the filters from which the keys are generated can be interchanged.
+ The key object is used as a key in the cache mechanism.</para>
+
+ <para><classname>@Key</classname> methods are needed only if:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>you enabled the filter caching system (enabled by
+ default)</para>
+ </listitem>
+
+ <listitem>
+ <para>your filter has parameters</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>In most cases, using the <literal>StandardFilterKey</literal>
+ implementation will be good enough. It delegates the
+ <methodname>equals()</methodname> / <methodname>hashCode()</methodname>
+  implementation to each of the parameters' equals and hashCode
+  methods.</para>
+
+  <para>As mentioned before, the defined filters are cached by default and
+ the cache uses a combination of hard and soft references to allow disposal
+ of memory when needed. The hard reference cache keeps track of the most
+ recently used filters and transforms the ones least used to
+ <classname>SoftReferences</classname> when needed. Once the limit of the
+ hard reference cache is reached additional filters are cached as
+ <classname>SoftReferences</classname>. To adjust the size of the hard
+ reference cache, use
+ <literal>hibernate.search.filter.cache_strategy.size</literal> (defaults
+ to 128). For advanced use of filter caching, you can implement your own
+ <classname>FilterCachingStrategy</classname>. The classname is defined by
+ <literal>hibernate.search.filter.cache_strategy</literal>.</para>
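+
+  <para>For example, the following properties (the values and the custom
+  strategy class are illustrative only) adjust the hard reference cache size
+  and plug in a custom <classname>FilterCachingStrategy</classname>:</para>
+
+  <programlisting>hibernate.search.filter.cache_strategy.size 256
+hibernate.search.filter.cache_strategy my.app.search.MyFilterCachingStrategy</programlisting>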
+
+ <para>This filter caching mechanism should not be confused with caching
+  the actual filter results. In Lucene it is common practice to wrap filters
+  in a <classname>CachingWrapperFilter</classname>. The wrapper will cache
+  the <classname>DocIdSet</classname> returned from the
+  <methodname>getDocIdSet(IndexReader reader)</methodname> method to avoid
+  expensive recomputation. It is important to mention that the computed
+  <classname>DocIdSet</classname> is only cacheable for the same
+  <classname>IndexReader</classname> instance, because the reader
+  effectively represents the state of the index at the moment it was opened.
+  The document list cannot change within an opened
+  <classname>IndexReader</classname>. A different or new
+  <classname>IndexReader</classname> instance, however, works potentially on a
+ different set of <classname>Document</classname>s (either from a different
+ index or simply because the index has changed), hence the cached
+ <classname>DocIdSet</classname> has to be recomputed.</para>
+
+  <para>Hibernate Search also helps with this aspect of caching. By default
+  the <literal>cache</literal> flag of
+  <classname>@FullTextFilterDef</classname> is set to
+  <literal>FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS</literal> which
+  will automatically cache the filter instance as well as wrap the specified
+  filter in a Hibernate specific implementation of
+  <classname>CachingWrapperFilter</classname>
+  (<classname>org.hibernate.search.filter.CachingWrapperFilter</classname>).
+  In contrast to Lucene's version of this class,
+ <classname>SoftReference</classname>s are used together with a hard
+ reference count (see discussion about filter cache). The hard reference
+ count can be adjusted using
+ <literal>hibernate.search.filter.cache_docidresults.size</literal>
+ (defaults to 5). The wrapping behaviour can be controlled using the
+ <literal>@FullTextFilterDef.cache</literal> parameter. There are three
+ different values for this parameter:</para>
+
+  <para><informaltable align="left">
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry align="center">Value</entry>
+
+ <entry align="center">Definition</entry>
+ </row>
+ </thead>
+
+ <tbody>
+ <row>
+ <entry align="left">FilterCacheModeType.NONE</entry>
+
+ <entry>No filter instance and no result is cached by Hibernate
+ Search. For every filter call, a new filter instance is created.
+ This setting might be useful for rapidly changing data sets or
+ heavily memory constrained environments.</entry>
+ </row>
+
+ <row>
+ <entry align="left">FilterCacheModeType.INSTANCE_ONLY</entry>
+
+ <entry>The filter instance is cached and reused across
+ concurrent <methodname>Filter.getDocIdSet()</methodname> calls.
+ <classname>DocIdSet</classname> results are not cached. This
+ setting is useful when a filter uses its own specific caching
+ mechanism or the filter results change dynamically due to
+ application specific events making
+ <classname>DocIdSet</classname> caching in both cases
+ unnecessary.</entry>
+ </row>
+
+ <row>
+ <entry
+ align="left">FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS</entry>
+
+ <entry>Both the filter instance and the
+ <classname>DocIdSet</classname> results are cached. This is the
+ default value.</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>Last but not least - why should filters be cached? There
+ are two areas where filter caching shines:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>the system does not update the targeted entity index often (in
+ other words, the IndexReader is reused a lot)</para>
+ </listitem>
+
+ <listitem>
+ <para>the Filter's DocIdSet is expensive to compute (compared to the
+ time spent to execute the query)</para>
+ </listitem>
+ </itemizedlist>
+
+ <section id="query-filter-shard">
+ <title>Using filters in a sharded environment</title>
+
+    <para>It is possible, in a sharded environment, to execute queries on a
+    subset of the available shards. This can be done in two steps:</para>
+
+ <itemizedlist>
+ <listitem>
+        <para>create a sharding strategy that selects a subset of
+        <classname>DirectoryProvider</classname>s depending on some filter
+        configuration</para>
+ </listitem>
+
+ <listitem>
+ <para>activate the proper filter at query time</para>
+ </listitem>
+ </itemizedlist>
+
+    <para>Let's first look at an example of a sharding strategy that queries
+    a specific customer shard if the customer filter is activated.</para>
+
+ <programlisting>public class CustomerShardingStrategy implements IndexShardingStrategy {
+
+ // stored DirectoryProviders in an array indexed by customerID
+ private DirectoryProvider<?>[] providers;
+
+ public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+ this.providers = providers;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+ return providers;
+ }
+
+ public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
+ Integer customerID = Integer.parseInt(document.getField("customerID").stringValue());
+ return providers[customerID];
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
+ return getDirectoryProvidersForAllShards();
+ }
+
+<emphasis role="bold"> /**
+ * Optimization; don't search ALL shards and union the results; in this case, we
+ * can be certain that all the data for a particular customer Filter is in a single
+ * shard; simply return that shard by customerID.
+ */
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] filters) {
+    FullTextFilter filter = getFilter(filters, "customer");
+ if (filter == null) {
+ return getDirectoryProvidersForAllShards();
+ }
+ else {
+ return new DirectoryProvider[] { providers[Integer.parseInt(filter.getParameter("customerID").toString())] };
+ }
+ }
+
+ private FullTextFilter getFilter(FullTextFilterImplementor[] filters, String name) {
+ for (FullTextFilterImplementor filter: filters) {
+ if (filter.getName().equals(name)) return filter;
+ }
+ return null;
+ }</emphasis>
+
+}</programlisting>
+
+ <para>In this example, if the filter named <literal>customer</literal>
+ is present, we make sure to only use the shard dedicated to this
+    customer. Otherwise, we return all shards. A given sharding strategy can
+    react to one or more filters and depend on their parameters.</para>
+
+ <para>The second step is simply to activate the filter at query time.
+ While the filter can be a regular filter (as defined in <xref
+ linkend="query-filter" />) which also filters Lucene results after the
+ query, you can make use of a special filter that will only be passed to
+ the sharding strategy and otherwise ignored for the rest of the query.
+ Simply use the <classname>ShardSensitiveOnlyFilter</classname> class
+ when declaring your filter.</para>
+
+ <programlisting>@Entity @Indexed
+<emphasis role="bold">@FullTextFilterDef(name="customer", impl=ShardSensitiveOnlyFilter.class)</emphasis>
+public class Customer {
+ ...
+}
+
+
+FullTextQuery query = ftEm.createFullTextQuery(luceneQuery, Customer.class);
+<emphasis role="bold">query.enableFulltextFilter("customer").setParameter("CustomerID", 5);</emphasis>
+@SuppressWarnings("unchecked")
+List<Customer> results = query.getResultList();</programlisting>
+
+ <para>Note that by using the
+ <classname>ShardSensitiveOnlyFilter</classname>, you do not have to
+    implement any Lucene filter. Using filters and a sharding strategy
+ reacting to these filters is recommended to speed up queries in a
+ sharded environment.</para>
+ </section>
+ </section>
+
+ <section>
+ <title>Optimizing the query process</title>
+
+ <para>Query performance depends on several criteria:</para>
+
+ <itemizedlist>
+ <listitem>
+ <para>the Lucene query itself: read the literature on this
+ subject</para>
+ </listitem>
+
+ <listitem>
+      <para>the number of objects loaded: use pagination (always ;-) ) or
+      index projection (if needed); a minimal sketch follows this list</para>
+ </listitem>
+
+ <listitem>
+ <para>the way Hibernate Search interacts with the Lucene readers:
+      define the appropriate <xref
+ linkend="search-architecture-readerstrategy" />.</para>
+ </listitem>
+ </itemizedlist>
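+
+    <para>As a reminder of the pagination and projection advice above, here is
+    a minimal sketch (the page boundaries are arbitrary and the query is
+    assumed to be built as in the previous examples):</para>
+
+    <programlisting>org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
+query.setFirstResult( 20 );  //start from the 21st element
+query.setMaxResults( 10 );   //return at most 10 elements
+query.setProjection( "id", "summary" ); //optionally avoid loading the full entities
+List results = query.list();</programlisting>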
+ </section>
+
+ <section>
+ <title>Native Lucene Queries</title>
+
+ <para>If you wish to use some specific features of Lucene, you can always
+ run Lucene specific queries. Check <xref linkend="search-lucene-native" />
+ for more information.</para>
+ </section>
+</chapter>
\ No newline at end of file
Property changes on: search/trunk/hibernate-search/src/main/docbook/en-US/modules/query.xml
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/master.pot
===================================================================
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/master.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/architecture.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/architecture.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/architecture.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,268 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: architecture.xml:30
+#, no-c-format
+msgid "Architecture"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:33
+#, no-c-format
+msgid "Overview"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:35
+#, no-c-format
+msgid "Hibernate Search consists of an indexing component and an index search component. Both are backed by Apache Lucene."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:38
+#, no-c-format
+msgid "Each time an entity is inserted, updated or removed in/from the database, Hibernate Search keeps track of this event (through the Hibernate event system) and schedules an index update. All the index updates are handled without you having to use the Apache Lucene APIs (see <xref linkend=\"search-configuration-event\"/>)."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:44
+#, no-c-format
+msgid "To interact with Apache Lucene indexes, Hibernate Search has the notion of <classname>DirectoryProvider</classname>s. A directory provider will manage a given Lucene <classname>Directory</classname> type. You can configure directory providers to adjust the directory target (see <xref linkend=\"search-configuration-directory\"/>)."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:50
+#, no-c-format
+msgid "Hibernate Search uses the Lucene index to search an entity and return a list of managed entities saving you the tedious object to Lucene document mapping. The same persistence context is shared between Hibernate and Hibernate Search. As a matter of fact, the <classname>FullTextSession</classname> is built on top of the Hibernate Session. so that the application code can use the unified <classname>org.hibernate.Query</classname> or <classname>javax.persistence.Query</classname> APIs exactly the way a HQL, JPA-QL or native queries would do."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:60
+#, no-c-format
+msgid "To be more efficient, Hibernate Search batches the write interactions with the Lucene index. There is currently two types of batching depending on the expected scope. Outside a transaction, the index update operation is executed right after the actual database operation. This scope is really a no scoping setup and no batching is performed. However, it is recommended - for both your database and Hibernate Search - to execute your operation in a transaction be it JDBC or JTA. When in a transaction, the index update operation is scheduled for the transaction commit phase and discarded in case of transaction rollback. The batching scope is the transaction. There are two immediate benefits:"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:73
+#, no-c-format
+msgid "Performance: Lucene indexing works better when operation are executed in batch."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:78
+#, no-c-format
+msgid "ACIDity: The work executed has the same scoping as the one executed by the database transaction and is executed if and only if the transaction is committed. This is not ACID in the strict sense of it, but ACID behavior is rarely useful for full text search indexes since they can be rebuilt from the source at any time."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:86
+#, no-c-format
+msgid "You can think of those two scopes (no scope vs transactional) as the equivalent of the (infamous) autocommit vs transactional behavior. From a performance perspective, the <emphasis>in transaction</emphasis> mode is recommended. The scoping choice is made transparently. Hibernate Search detects the presence of a transaction and adjust the scoping."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:92
+#, no-c-format
+msgid "Hibernate Search works perfectly fine in the Hibernate / EntityManager long conversation pattern aka. atomic conversation."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:96
+#, no-c-format
+msgid "Depending on user demand, additional scoping will be considered, the pluggability mechanism being already in place."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:102
+#, no-c-format
+msgid "Back end"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:104
+#, no-c-format
+msgid "Hibernate Search offers the ability to let the scoped work being processed by different back ends. Two back ends are provided out of the box for two different scenarios."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:109
+#, no-c-format
+msgid "Back end types"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:112
+#, no-c-format
+msgid "Lucene"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:114
+#, no-c-format
+msgid "In this mode, all index update operations applied on a given node (JVM) will be executed to the Lucene directories (through the directory providers) by the same node. This mode is typically used in non clustered environment or in clustered environments where the directory store is shared."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:132
+#, no-c-format
+msgid "Lucene back end configuration."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:136
+#, no-c-format
+msgid "This mode targets non clustered applications, or clustered applications where the Directory is taking care of the locking strategy."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:140
+#, no-c-format
+msgid "The main advantage is simplicity and immediate visibility of the changes in Lucene queries (a requirement in some applications)."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:145
+#, no-c-format
+msgid "<title>JMS</title>"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:147
+#, no-c-format
+msgid "All index update operations applied on a given node are sent to a JMS queue. A unique reader will then process the queue and update the master index. The master index is then replicated on a regular basis to the slave copies. This is known as the master/slaves pattern. The master is the sole responsible for updating the Lucene index. The slaves can accept read as well as write operations. However, they only process the read operation on their local index copy and delegate the update operations to the master."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:167
+#, no-c-format
+msgid "JMS back end configuration."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:171
+#, no-c-format
+msgid "This mode targets clustered environments where throughput is critical, and index update delays are affordable. Reliability is ensured by the JMS provider and by having the slaves working on a local copy of the index."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:177
+#, no-c-format
+msgid "Hibernate Search is an extensible architecture. Feel free to drop ideas for other third party back ends to <literal>hibernate-dev(a)lists.jboss.org</literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:183
+#, no-c-format
+msgid "Work execution"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:185
+#, no-c-format
+msgid "The indexing work (done by the back end) can be executed synchronously with the transaction commit (or update operation if out of transaction), or asynchronously."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:190
+#, no-c-format
+msgid "Synchronous"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:192
+#, no-c-format
+msgid "This is the safe mode where the back end work is executed in concert with the transaction commit. Under highly concurrent environment, this can lead to throughput limitations (due to the Apache Lucene lock mechanism) and it can increase the system response time if the backend is significantly slower than the transactional process and if a lot of IO operations are involved."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:201
+#, no-c-format
+msgid "Asynchronous"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:203
+#, no-c-format
+msgid "This mode delegates the work done by the back end to a different thread. That way, throughput and response time are (to a certain extend) decorrelated from the back end performance. The drawback is that a small delay appears between the transaction commit and the index update and a small overhead is introduced to deal with thread management."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:210
+#, no-c-format
+msgid "It is recommended to use synchronous execution first and evaluate asynchronous execution if performance problems occur and after having set up a proper benchmark (ie not a lonely cowboy hitting the system in a completely unrealistic way)."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:219
+#, no-c-format
+msgid "Reader strategy"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:221
+#, no-c-format
+msgid "When executing a query, Hibernate Search interacts with the Apache Lucene indexes through a reader strategy. Choosing a reader strategy will depend on the profile of the application (frequent updates, read mostly, asynchronous index update etc). See also"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:228
+#, no-c-format
+msgid "Shared"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:230
+#, no-c-format
+msgid "With this strategy, Hibernate Search will share the same <classname>IndexReader</classname>, for a given Lucene index, across multiple queries and threads provided that the <classname>IndexReader</classname> is still up-to-date. If the <classname>IndexReader</classname> is not up-to-date, a new one is opened and provided. Each <classname>IndexReader</classname> is made of several <classname>SegmentReader</classname>s. This strategy only reopens segments that have been modified or created after last opening and shares the already loaded segments from the previous instance. This strategy is the default."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:241
+#, no-c-format
+msgid "The name of this strategy is <literal>shared</literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:245
+#, no-c-format
+msgid "Not-shared"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:247
+#, no-c-format
+msgid "Every time a query is executed, a Lucene <classname>IndexReader</classname> is opened. This strategy is not the most efficient since opening and warming up an <classname>IndexReader</classname> can be a relatively expensive operation."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:253
+#, no-c-format
+msgid "The name of this strategy is <literal>not-shared</literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:257
+#, no-c-format
+msgid "Custom"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:259
+#, no-c-format
+msgid "You can write your own reader strategy that suits your application needs by implementing <classname>org.hibernate.search.reader.ReaderProvider</classname>. The implementation must be thread safe."
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/architecture.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/batchindex.pot
===================================================================
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/batchindex.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/configuration.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/configuration.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/configuration.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1265 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: configuration.xml:30
+#, no-c-format
+msgid "Configuration"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:33
+#, no-c-format
+msgid "Directory configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:35
+#, no-c-format
+msgid "Apache Lucene has a notion of <literal>Directory</literal> to store the index files. The <classname>Directory</classname> implementation can be customized, but Lucene comes bundled with a file system (<literal>FSDirectoryProvider</literal>) and an in memory (<literal>RAMDirectoryProvider</literal>) implementation. <literal>DirectoryProvider</literal>s are the Hibernate Search abstraction around a Lucene <classname>Directory</classname> and handle the configuration and the initialization of the underlying Lucene resources. <xref linkend=\"directory-provider-table\"/> shows the list of the directory providers bundled with Hibernate Search."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:47
+#, no-c-format
+msgid "List of built-in Directory Providers"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:52 configuration.xml:811
+#, no-c-format
+msgid "Class"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:54 configuration.xml:349 configuration.xml:635 configuration.xml:813
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:56
+#, no-c-format
+msgid "Properties"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:62
+#, no-c-format
+msgid "org.hibernate.search.store.RAMDirectoryProvider"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:64
+#, no-c-format
+msgid "Memory based directory, the directory will be uniquely identified (in the same deployment unit) by the <literal>@Indexed.index</literal> element"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:68 configuration.xml:868
+#, no-c-format
+msgid "none"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:72
+#, no-c-format
+msgid "org.hibernate.search.store.FSDirectoryProvider"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:74
+#, no-c-format
+msgid "File system based directory. The directory used will be <indexBase>/< indexName >"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:77
+#, no-c-format
+msgid "<literal>indexBase</literal> : Base directory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:78 configuration.xml:102 configuration.xml:136
+#, no-c-format
+msgid "<literal>indexName</literal>: override @Indexed.index (useful for sharded indexes)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:79 configuration.xml:113 configuration.xml:147
+#, no-c-format
+msgid "<literal> locking_strategy</literal> : optional, see"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:86
+#, no-c-format
+msgid "org.hibernate.search.store.FSMasterDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:88
+#, no-c-format
+msgid "File system based directory. Like FSDirectoryProvider. It also copies the index to a source directory (aka copy directory) on a regular basis."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:91 configuration.xml:125
+#, no-c-format
+msgid "The recommended value for the refresh period is (at least) 50% higher that the time to copy the information (default 3600 seconds - 60 minutes)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:93 configuration.xml:127
+#, no-c-format
+msgid "Note that the copy is based on an incremental copy mechanism reducing the average copy time."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:95
+#, no-c-format
+msgid "DirectoryProvider typically used on the master node in a JMS back end cluster."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:96 configuration.xml:130
+#, no-c-format
+msgid "The <literal> buffer_size_on_copy</literal> optimum depends on your operating system and available RAM; most people reported good results using values between 16 and 64MB."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:101 configuration.xml:135
+#, no-c-format
+msgid "<literal>indexBase</literal>: Base directory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:104 configuration.xml:138
+#, no-c-format
+msgid "<literal>sourceBase</literal>: Source (copy) base directory."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:105 configuration.xml:139
+#, no-c-format
+msgid "<literal>source</literal>: Source directory suffix (default to <literal>@Indexed.index</literal>). The actual source directory name being <filename><sourceBase>/<source></filename>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:109 configuration.xml:143
+#, no-c-format
+msgid "<literal>refresh</literal>: refresh period in second (the copy will take place every refresh seconds)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:110 configuration.xml:144
+#, no-c-format
+msgid "<literal>buffer_size_on_copy</literal>: The amount of MegaBytes to move in a single low level copy instruction; defaults to 16MB."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:120
+#, no-c-format
+msgid "org.hibernate.search.store.FSSlaveDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:122
+#, no-c-format
+msgid "File system based directory. Like FSDirectoryProvider, but retrieves a master version (source) on a regular basis. To avoid locking and inconsistent search results, 2 local copies are kept."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:129
+#, no-c-format
+msgid "DirectoryProvider typically used on slave nodes using a JMS back end."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:156
+#, no-c-format
+msgid "If the built-in directory providers do not fit your needs, you can write your own directory provider by implementing the <classname>org.hibernate.store.DirectoryProvider</classname> interface."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:161
+#, no-c-format
+msgid "Each indexed entity is associated to a Lucene index (an index can be shared by several entities but this is not usually the case). You can configure the index through properties prefixed by <constant>hibernate.search.</constant><replaceable>indexname</replaceable> . Default properties inherited to all indexes can be defined using the prefix <constant>hibernate.search.default.</constant>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:168
+#, no-c-format
+msgid "To define the directory provider of a given index, you use the <constant>hibernate.search.<replaceable>indexname</replaceable>.directory_provider </constant>"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:173
+#, no-c-format
+msgid "Configuring directory providers"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:175
+#, no-c-format
+msgid ""
+ "hibernate.search.default.directory_provider org.hibernate.search.store.FSDirectoryProvider\n"
+ "hibernate.search.default.indexBase=/usr/lucene/indexes\n"
+ "hibernate.search.Rules.directory_provider org.hibernate.search.store.RAMDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:178
+#, no-c-format
+msgid "applied on"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:181
+#, no-c-format
+msgid "Specifying the index name using the <literal>index</literal> parameter of <classname>@Indexed</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:184
+#, no-c-format
+msgid ""
+ "@Indexed(index=\"Status\")\n"
+ "public class Status { ... }\n"
+ "\n"
+ "@Indexed(index=\"Rules\")\n"
+ "public class Rule { ... }"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:187
+#, no-c-format
+msgid "will create a file system directory in <filename>/usr/lucene/indexes/Status</filename> where the Status entities will be indexed, and use an in memory directory named <literal>Rules</literal> where Rule entities will be indexed."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:192
+#, no-c-format
+msgid "You can easily define common rules like the directory provider and base directory, and override those defaults later on on a per index basis."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:196
+#, no-c-format
+msgid "Writing your own <classname>DirectoryProvider</classname>, you can utilize this configuration mechanism as well."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:201
+#, no-c-format
+msgid "Sharding indexes"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:203
+#, no-c-format
+msgid "In some extreme cases involving huge indexes (in size), it is necessary to split (shard) the indexing data of a given entity type into several Lucene indexes. This solution is not recommended until you reach significant index sizes and index update times are slowing the application down. The main drawback of index sharding is that searches will end up being slower since more files have to be opened for a single search. In other words don't do it until you have problems :)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:211
+#, no-c-format
+msgid "Despite this strong warning, Hibernate Search allows you to index a given entity type into several sub indexes. Data is sharded into the different sub indexes thanks to an <classname>IndexShardingStrategy</classname>. By default, no sharding strategy is enabled, unless the number of shards is configured. To configure the number of shards use the following property"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:219
+#, no-c-format
+msgid "Enabling index sharding by specifying nbr_of_shards for a specific index"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:222
+#, no-c-format
+msgid "hibernate.search.<indexName>.sharding_strategy.nbr_of_shards 5"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:225
+#, no-c-format
+msgid "This will use 5 different shards."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:227
+#, no-c-format
+msgid "The default sharding strategy, when shards are set up, splits the data according to the hash value of the id string representation (generated by the Field Bridge). This ensures a fairly balanced sharding. You can replace the strategy by implementing <literal>IndexShardingStrategy</literal> and by setting the following property"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:235
+#, no-c-format
+msgid "Specifying a custom sharding strategy"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:237
+#, no-c-format
+msgid "hibernate.search.<indexName>.sharding_strategy my.shardingstrategy.Implementation"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:240
+#, no-c-format
+msgid "Each shard has an independent directory provider configuration as described in <xref linkend=\"search-configuration-directory\"/>. The DirectoryProvider default name for the previous example are <literal><indexName>.0</literal> to <literal><indexName>.4</literal>. In other words, each shard has the name of it's owning index followed by <constant>.</constant> (dot) and its index number."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:249
+#, no-c-format
+msgid "Configuring the sharding configuration for an example entity <classname>Animal</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:252
+#, no-c-format
+msgid ""
+ "hibernate.search.default.indexBase /usr/lucene/indexes\n"
+ "\n"
+ "hibernate.search.Animal.sharding_strategy.nbr_of_shards 5\n"
+ "hibernate.search.Animal.directory_provider org.hibernate.search.store.FSDirectoryProvider\n"
+ "hibernate.search.Animal.0.indexName Animal00\n"
+ "hibernate.search.Animal.3.indexBase /usr/lucene/sharded\n"
+ "hibernate.search.Animal.3.indexName Animal03"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:255
+#, no-c-format
+msgid "This configuration uses the default id string hashing strategy and shards the Animal index into 5 subindexes. All subindexes are <classname>FSDirectoryProvider</classname> instances and the directory where each subindex is stored is as followed:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:262
+#, no-c-format
+msgid "for subindex 0: /usr/lucene/indexes/Animal00 (shared indexBase but overridden indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:267
+#, no-c-format
+msgid "for subindex 1: /usr/lucene/indexes/Animal.1 (shared indexBase, default indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:272
+#, no-c-format
+msgid "for subindex 2: /usr/lucene/indexes/Animal.2 (shared indexBase, default indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:277
+#, no-c-format
+msgid "for subindex 3: /usr/lucene/shared/Animal03 (overridden indexBase, overridden indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:282
+#, no-c-format
+msgid "for subindex 4: /usr/lucene/indexes/Animal.4 (shared indexBase, default indexName)"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:289
+#, no-c-format
+msgid "Sharing indexes (two entities into the same directory)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:292
+#, no-c-format
+msgid "This is only presented here so that you know the option is available. There is really not much benefit in sharing indexes."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:296
+#, no-c-format
+msgid "It is technically possible to store the information of more than one entity into a single Lucene index. There are two ways to accomplish this:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:302
+#, no-c-format
+msgid "Configuring the underlying directory providers to point to the same physical index directory. In practice, you set the property <literal>hibernate.search.[fully qualified entity name].indexName</literal> to the same value. As an example let’s use the same index (directory) for the <classname>Furniture</classname> and <classname>Animal</classname> entity. We just set <literal>indexName</literal> for both entities to for example “Animal”. Both entities will then be stored in the Animal directory"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:312
+#, no-c-format
+msgid ""
+ "<code>hibernate.search.org.hibernate.search.test.shards.Furniture.indexName = Animal\n"
+ "hibernate.search.org.hibernate.search.test.shards.Animal.indexName = Animal</code>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:316
+#, no-c-format
+msgid "Setting the <code>@Indexed</code> annotation’s <methodname>index</methodname> attribute of the entities you want to merge to the same value. If we again wanted all <classname>Furniture</classname> instances to be indexed in the <classname>Animal</classname> index along with all instances of <classname>Animal</classname> we would specify <code>@Indexed(index=”Animal”)</code> on both <classname>Animal</classname> and <classname>Furniture</classname> classes."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:330
+#, no-c-format
+msgid "Worker configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:332
+#, no-c-format
+msgid "It is possible to refine how Hibernate Search interacts with Lucene through the worker configuration. The work can be executed to the Lucene directory or sent to a JMS queue for later processing. When processed to the Lucene directory, the work can be processed synchronously or asynchronously to the transaction commit."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:338
+#, no-c-format
+msgid "You can define the worker configuration using the following properties"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:342
+#, no-c-format
+msgid "worker configuration"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:347 configuration.xml:633
+#, no-c-format
+msgid "Property"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:353
+#, no-c-format
+msgid "hibernate.search.worker.backend"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:355
+#, no-c-format
+msgid "Out of the box support for the Apache Lucene back end and the JMS back end. Default to <literal>lucene</literal>. Supports also <literal>jms</literal> and <literal>blackhole</literal>."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:361
+#, no-c-format
+msgid "hibernate.search.worker.execution"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:363
+#, no-c-format
+msgid "Supports synchronous and asynchronous execution. Default to <literal><literal>sync</literal></literal>. Supports also <literal>async</literal>."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:369
+#, no-c-format
+msgid "hibernate.search.worker.thread_pool.size"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:371
+#, no-c-format
+msgid "Defines the number of threads in the pool. useful only for asynchronous execution. Default to 1."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:376
+#, no-c-format
+msgid "hibernate.search.worker.buffer_queue.max"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:378
+#, no-c-format
+msgid "Defines the maximal number of work queue if the thread poll is starved. Useful only for asynchronous execution. Default to infinite. If the limit is reached, the work is done by the main thread."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:385
+#, no-c-format
+msgid "hibernate.search.worker.jndi.*"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:387
+#, no-c-format
+msgid "Defines the JNDI properties to initiate the InitialContext (if needed). JNDI is only used by the JMS back end."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:392
+#, no-c-format
+msgid "hibernate.search.worker.jms.connection_factory"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:395
+#, no-c-format
+msgid "Mandatory for the JMS back end. Defines the JNDI name to lookup the JMS connection factory from (<literal>/ConnectionFactory</literal> by default in JBoss AS)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:402
+#, no-c-format
+msgid "hibernate.search.worker.jms.queue"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:404
+#, no-c-format
+msgid "Mandatory for the JMS back end. Defines the JNDI name to lookup the JMS queue from. The queue will be used to post work messages."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:414
+#, no-c-format
+msgid "JMS Master/Slave configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:416
+#, no-c-format
+msgid "This section describes in greater detail how to configure the Master / Slaves Hibernate Search architecture."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:429
+#, no-c-format
+msgid "JMS back end configuration."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:433
+#, no-c-format
+msgid "Slave nodes"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:435
+#, no-c-format
+msgid "Every index update operation is sent to a JMS queue. Index querying operations are executed on a local index copy."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:439
+#, no-c-format
+msgid "JMS Slave configuration"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:441
+#, no-c-format
+msgid ""
+ "### slave configuration\n"
+ "\n"
+ "## DirectoryProvider\n"
+ "# (remote) master location\n"
+ "hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/mastercopy\n"
+ "\n"
+ "# local copy location\n"
+ "hibernate.search.default.indexBase = /Users/prod/lucenedirs\n"
+ "\n"
+ "# refresh every half hour\n"
+ "hibernate.search.default.refresh = 1800\n"
+ "\n"
+ "# appropriate directory provider\n"
+ "hibernate.search.default.directory_provider = org.hibernate.search.store.FSSlaveDirectoryProvider\n"
+ "\n"
+ "## Backend configuration\n"
+ "hibernate.search.worker.backend = jms\n"
+ "hibernate.search.worker.jms.connection_factory = /ConnectionFactory\n"
+ "hibernate.search.worker.jms.queue = queue/hibernatesearch\n"
+ "#optional jndi configuration (check your JMS provider for more information)\n"
+ "\n"
+ "## Optional asynchronous execution strategy\n"
+ "# hibernate.search.worker.execution = async\n"
+ "# hibernate.search.worker.thread_pool.size = 2\n"
+ "# hibernate.search.worker.buffer_queue.max = 50"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:444
+#, no-c-format
+msgid "A file system local copy is recommended for faster search results."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:447 configuration.xml:463
+#, no-c-format
+msgid "The refresh period should be higher that the expected time copy."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:452
+#, no-c-format
+msgid "Master node"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:454
+#, no-c-format
+msgid "Every index update operation is taken from a JMS queue and executed. The master index is copied on a regular basis."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:458
+#, no-c-format
+msgid "JMS Master configuration"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:460
+#, no-c-format
+msgid ""
+ "### master configuration\n"
+ "\n"
+ "## DirectoryProvider\n"
+ "# (remote) master location where information is copied to\n"
+ "hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/mastercopy\n"
+ "\n"
+ "# local master location\n"
+ "hibernate.search.default.indexBase = /Users/prod/lucenedirs\n"
+ "\n"
+ "# refresh every half hour\n"
+ "hibernate.search.default.refresh = 1800\n"
+ "\n"
+ "# appropriate directory provider\n"
+ "hibernate.search.default.directory_provider = org.hibernate.search.store.FSMasterDirectoryProvider\n"
+ "\n"
+ "## Backend configuration\n"
+ "#Backend is the default lucene one"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:466
+#, no-c-format
+msgid "In addition to the Hibernate Search framework configuration, a Message Driven Bean should be written and set up to process the index works queue through JMS."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:471
+#, no-c-format
+msgid "Message Driven Bean processing the indexing queue"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:473
+#, no-c-format
+msgid ""
+ "@MessageDriven(activationConfig = {\n"
+ " @ActivationConfigProperty(propertyName=\"destinationType\", propertyValue=\"javax.jms.Queue\"),\n"
+ " @ActivationConfigProperty(propertyName=\"destination\", propertyValue=\"queue/hibernatesearch\"),\n"
+ " @ActivationConfigProperty(propertyName=\"DLQMaxResent\", propertyValue=\"1\")\n"
+ " } )\n"
+ "public class MDBSearchController extends AbstractJMSHibernateSearchController implements MessageListener {\n"
+ " @PersistenceContext EntityManager em;\n"
+ " \n"
+ " //method retrieving the appropriate session\n"
+ " protected Session getSession() {\n"
+ " return (Session) em.getDelegate();\n"
+ " }\n"
+ "\n"
+ " //potentially close the session opened in #getSession(), not needed here\n"
+ " protected void cleanSessionIfNeeded(Session session) \n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:476
+#, no-c-format
+msgid "This example inherits from the abstract JMS controller class available in the Hibernate Search source code and implements a JavaEE 5 MDB. This implementation is given as an example and, while most likely be more complex, can be adjusted to make use of non Java EE Message Driven Beans. For more information about the <methodname>getSession()</methodname> and <methodname>cleanSessionIfNeeded()</methodname>, please check <classname>AbstractJMSHibernateSearchController</classname>'s javadoc."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:489
+#, no-c-format
+msgid "Reader strategy configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:491
+#, no-c-format
+msgid "The different reader strategies are described in <xref linkend=\"search-architecture-readerstrategy\"/>. Out of the box strategies are:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:497
+#, no-c-format
+msgid "<literal>shared</literal>: share index readers across several queries. This strategy is the most efficient."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:502
+#, no-c-format
+msgid "<literal>not-shared</literal>: create an index reader for each individual query"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:507
+#, no-c-format
+msgid "The default reader strategy is <literal>shared</literal>. This can be adjusted:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:510
+#, no-c-format
+msgid "hibernate.search.reader.strategy = not-shared"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:512
+#, no-c-format
+msgid "Adding this property switches to the <literal>not-shared</literal> strategy."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:515
+#, no-c-format
+msgid "Or if you have a custom reader strategy:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:517
+#, no-c-format
+msgid "hibernate.search.reader.strategy = my.corp.myapp.CustomReaderProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:519
+#, no-c-format
+msgid "where <classname>my.corp.myapp.CustomReaderProvider</classname> is the custom strategy implementation."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:524
+#, no-c-format
+msgid "Enabling Hibernate Search and automatic indexing"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:527
+#, no-c-format
+msgid "Enabling Hibernate Search"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:529
+#, no-c-format
+msgid "Hibernate Search is enabled out of the box when using Hibernate Annotations or Hibernate EntityManager. If, for some reason you need to disable it, set <literal>hibernate.search.autoregister_listeners</literal> to false. Note that there is no performance penalty when the listeners are enabled even though no entities are indexed."
+msgstr ""
+
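As a quick illustration, disabling the listeners is a one-line property, for example in hibernate.properties (only do this if you really want Hibernate Search switched off):

    hibernate.search.autoregister_listeners = false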
+#. Tag: para
+#: configuration.xml:536
+#, no-c-format
+msgid "To enable Hibernate Search in Hibernate Core (ie. if you don't use Hibernate Annotations), add the <literal>FullTextIndexEventListener</literal> for the following six Hibernate events and also add it after the default <literal>DefaultFlushEventListener</literal>, as in the following example."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:543
+#, no-c-format
+msgid "Explicitly enabling Hibernate Search by configuring the <classname>FullTextIndexEventListener</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:546
+#, no-c-format
+msgid ""
+ "<hibernate-configuration>\n"
+ " <session-factory>\n"
+ " ...\n"
+ " <event type=\"post-update\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"post-insert\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"post-delete\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"post-collection-recreate\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"post-collection-remove\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"post-collection-update\">\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " <event type=\"flush\">\n"
+ " <listener class=\"org.hibernate.event.def.DefaultFlushEventListener\"/>\n"
+ " <listener class=\"org.hibernate.search.event.FullTextIndexEventListener\"/>\n"
+ " </event>\n"
+ " </session-factory>\n"
+ "</hibernate-configuration>"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:551
+#, no-c-format
+msgid "Automatic indexing"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:553
+#, no-c-format
+msgid "By default, every time an object is inserted, updated or deleted through Hibernate, Hibernate Search updates the according Lucene index. It is sometimes desirable to disable that features if either your index is read-only or if index updates are done in a batch way (see <xref linkend=\"search-batchindex\"/>)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:559
+#, no-c-format
+msgid "To disable event based indexing, set"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:561
+#, no-c-format
+msgid "hibernate.search.indexing_strategy manual"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:564
+#, no-c-format
+msgid "In most case, the JMS backend provides the best of both world, a lightweight event based system keeps track of all changes in the system, and the heavyweight indexing process is done by a separate process or machine."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:573
+#, no-c-format
+msgid "Tuning Lucene indexing performance"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:575
+#, no-c-format
+msgid "Hibernate Search allows you to tune the Lucene indexing performance by specifying a set of parameters which are passed through to underlying Lucene <literal>IndexWriter</literal> such as <literal>mergeFactor</literal>, <literal>maxMergeDocs</literal> and <literal>maxBufferedDocs</literal>. You can specify these parameters either as default values applying for all indexes, on a per index basis, or even per shard."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:583
+#, no-c-format
+msgid "There are two sets of parameters allowing for different performance settings depending on the use case. During indexing operations triggered by database modifications, the parameters are grouped by the <literal>transaction</literal> keyword: <programlisting>hibernate.search.[default|<indexname>].indexwriter.transaction.<parameter_name></programlisting> When indexing occurs via <literal>FullTextSession.index()</literal> (see <xref linkend=\"search-batchindex\"/>), the used properties are those grouped under the <literal>batch</literal> keyword:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:589
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.batch.<parameter_name>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:591
+#, no-c-format
+msgid ""
+ "Unless the corresponding <literal>.batch</literal> property is explicitly set, the value will default to the <literal>.transaction</literal> property. If no value is set for a <literal>.batch</literal> value in a specific shard configuration, Hibernate Search will look at the index section, then at the default section and after that it will look for a <literal>.transaction</literal> in the same order: <programlisting>hibernate.search.Animals.2.indexwriter.transaction.max_merge_docs 10\n"
+ "hibernate.search.Animals.2.indexwriter.transaction.merge_factor 20\n"
+ "hibernate.search.default.indexwriter.batch.max_merge_docs 100</programlisting> This configuration will result in these settings applied to the second shard of Animals index:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:603
+#, no-c-format
+msgid "<literal>transaction.max_merge_docs</literal> = 10"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:607
+#, no-c-format
+msgid "<literal>batch.max_merge_docs</literal> = 100"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:611
+#, no-c-format
+msgid "<literal>transaction.merge_factor</literal> = 20"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:615
+#, no-c-format
+msgid "<literal>batch.merge_factor</literal> = 20"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:619
+#, no-c-format
+msgid "All other values will use the defaults defined in Lucene."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:621
+#, no-c-format
+msgid "The default for all values is to leave them at Lucene's own default, so the listed values in the following table actually depend on the version of Lucene you are using; values shown are relative to version <literal>2.4</literal>. For more information about Lucene indexing performances, please refer to the Lucene documentation."
+msgstr ""
+
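As an illustration of the naming pattern described above, a hibernate.properties fragment could look like the following sketch; the index name "Book" and the chosen numbers are made up for the example:

    # illustrative values only - tune against your own measurements
    hibernate.search.default.indexwriter.transaction.max_buffered_docs = 50
    hibernate.search.default.indexwriter.batch.ram_buffer_size = 64
    hibernate.search.Book.indexwriter.batch.merge_factor = 30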
+#. Tag: title
+#: configuration.xml:628
+#, no-c-format
+msgid "List of indexing performance and behavior properties"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:637
+#, no-c-format
+msgid "Default Value"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:643
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_buffered_delete_terms"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:645
+#, no-c-format
+msgid "Determines the minimal number of delete terms required before the buffered in-memory delete terms are applied and flushed. If there are documents buffered in memory at the time, they are merged and a new segment is created."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:650 configuration.xml:660
+#, no-c-format
+msgid "Disabled (flushes by RAM usage)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:654
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_buffered_docs"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:656
+#, no-c-format
+msgid "Controls the amount of documents buffered in memory during indexing. The bigger the more RAM is consumed."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:664
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_field_length"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:666
+#, no-c-format
+msgid "The maximum number of terms that will be indexed for a single field. This limits the amount of memory required for indexing so that very large data will not crash the indexing process by running out of memory. This setting refers to the number of running terms, not to the number of different terms."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:671
+#, no-c-format
+msgid "This silently truncates large documents, excluding from the index all terms that occur further in the document. If you know your source documents are large, be sure to set this value high enough to accommodate the expected size. If you set it to Integer.MAX_VALUE, then the only limit is your memory, but you should anticipate an OutOfMemoryError."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:676
+#, no-c-format
+msgid "If setting this value in <literal>batch</literal> differently than in <literal>transaction</literal> you may get different data (and results) in your index depending on the indexing mode."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:682
+#, no-c-format
+msgid "10000"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:686
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].max_merge_docs"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:688
+#, no-c-format
+msgid "Defines the largest number of documents allowed in a segment. Larger values are best for batched indexing and speedier searches. Small values are best for transaction indexing."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:693
+#, no-c-format
+msgid "Unlimited (Integer.MAX_VALUE)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:697
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].merge_factor"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:699
+#, no-c-format
+msgid "Controls segment merge frequency and size."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:700
+#, no-c-format
+msgid "Determines how often segment indexes are merged when insertion occurs. With smaller values, less RAM is used while indexing, and searches on unoptimized indexes are faster, but indexing speed is slower. With larger values, more RAM is used during indexing, and while searches on unoptimized indexes are slower, indexing is faster. Thus larger values (> 10) are best for batch index creation, and smaller values (< 10) for indexes that are interactively maintained. The value must no be lower than 2."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:710
+#, no-c-format
+msgid "<entry>10</entry>"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:714
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].ram_buffer_size"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:716
+#, no-c-format
+msgid "Controls the amount of RAM in MB dedicated to document buffers. When used together max_buffered_docs a flush occurs for whichever event happens first."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:718
+#, no-c-format
+msgid "Generally for faster indexing performance it's best to flush by RAM usage instead of document count and use as large a RAM buffer as you can."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:723
+#, no-c-format
+msgid "16 MB"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:727
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].term_index_interval"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:729
+#, no-c-format
+msgid "Expert: Set the interval between indexed terms."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:730
+#, no-c-format
+msgid "Large values cause less memory to be used by IndexReader, but slow random-access to terms. Small values cause more memory to be used by an IndexReader, and speed random-access to terms. See Lucene documentation for more details."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:736
+#, no-c-format
+msgid "<entry>128</entry>"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:740
+#, no-c-format
+msgid "hibernate.search.[default|<indexname>].indexwriter.[transaction|batch].use_compound_file"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:742
+#, no-c-format
+msgid "The advantage of using the compound file format is that less file descriptors are used. The disadvantage is that indexing takes more time and temporary disk space. You can set this parameter to <literal>false</literal> in an attempt to improve the indexing time, but you could run out of file descriptors if <literal>mergeFactor</literal> is also large."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:748
+#, no-c-format
+msgid "Boolean parameter, use \"<literal>true</literal>\" or \"<literal>false</literal>\". The default value for this option is <literal>true</literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:753
+#, no-c-format
+msgid "true"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:759
+#, no-c-format
+msgid "To tune the indexing speed it might be useful to time the object loading from database in isolation from the writes to the index. To achieve this set the <literal>blackhole</literal> as worker backend and start you indexing routines. This backend does not disable Hibernate Search: it will still generate the needed changesets to the index, but will discard them instead of flushing them to the index. As opposite to setting the <literal>hibernate.search.indexing_strategy</literal> to <literal>manual</literal> when using <literal>blackhole</literal> it will possibly load more data to rebuild the index from associated entities."
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:769
+#, no-c-format
+msgid "hibernate.search.worker.backend blackhole"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:771
+#, no-c-format
+msgid "The recommended approach is to focus first on optimizing the object loading, and then use the timings you achieve as a baseline to tune the indexing process."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:773
+#, no-c-format
+msgid "The <literal>blackhole</literal> backend is not meant to be used in production, only as a tool to identify indexing bottlenecks."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:781
+#, no-c-format
+msgid "LockFactory configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:785
+#, no-c-format
+msgid "Lucene Directories have default locking strategies which work well for most cases, but it's possible to specify for each index managed by Hibernate Search which LockingFactory you want to use."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:789
+#, no-c-format
+msgid "Some of these locking strategies require a filesystem level lock and may be used even on RAM based indexes, but this is not recommended and of no practical use."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:795
+#, no-c-format
+msgid "To select a locking factory, set the <literal>hibernate.search.<index>.locking_strategy</literal> option to one of <literal>simple</literal>, <literal>native</literal>, <literal>single</literal> or <literal>none</literal>, or set it to the fully qualified name of an implementation of <literal>org.hibernate.search.store.LockFactoryFactory</literal>; Implementing this interface you can provide a custom <literal>org.apache.lucene.store.LockFactory</literal>."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:804
+#, no-c-format
+msgid "List of available LockFactory implementations"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:809
+#, no-c-format
+msgid "name"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:819
+#, no-c-format
+msgid "simple"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:821
+#, no-c-format
+msgid "org.apache.lucene.store.SimpleFSLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:824
+#, no-c-format
+msgid "Safe implementation based on Java's File API, it marks the usage of the index by creating a marker file."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:827
+#, no-c-format
+msgid "If for some reason you had to kill your application, you will need to remove this file before restarting it."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:830
+#, no-c-format
+msgid "This is the default implementation for <literal>FSDirectoryProvider</literal>,<literal>FSMasterDirectoryProvider</literal> and <literal>FSSlaveDirectoryProvider</literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:837
+#, no-c-format
+msgid "native"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:839
+#, no-c-format
+msgid "org.apache.lucene.store.NativeFSLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:842
+#, no-c-format
+msgid "As does <literal>simple</literal> this also marks the usage of the index by creating a marker file, but this one is using native OS file locks so that even if your application crashes the locks will be cleaned up."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:847
+#, no-c-format
+msgid "This implementation has known problems on NFS."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:852
+#, no-c-format
+msgid "single"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:854
+#, no-c-format
+msgid "org.apache.lucene.store.SingleInstanceLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:857
+#, no-c-format
+msgid "This LockFactory doesn't use a file marker but is a Java object lock held in memory; therefore it's possible to use it only when you are sure the index is not going to be shared by any other process."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:862
+#, no-c-format
+msgid "This is the default implementation for <literal>RAMDirectoryProvider</literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:870
+#, no-c-format
+msgid "org.apache.lucene.store.NoLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:873
+#, no-c-format
+msgid "All changes to this index are not coordinated by any lock; test your application carefully and make sure you know what it means."
+msgstr ""
+
+#. Tag: section
+#: configuration.xml:880
+#, no-c-format
+msgid "Configuration example:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:884
+#, no-c-format
+msgid ""
+ "hibernate.search.default.locking_strategy simple\n"
+ "hibernate.search.Animals.locking_strategy native\n"
+ "hibernate.search.Books.locking_strategy org.custom.components.MyLockingFactory"
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/configuration.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/getting-started.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/getting-started.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/getting-started.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,656 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: getting-started.xml:30
+#, no-c-format
+msgid "Getting started"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:32
+#, no-c-format
+msgid "Welcome to Hibernate Search! The following chapter will guide you through the initial steps required to integrate Hibernate Search into an existing Hibernate enabled application. In case you are a Hibernate new timer we recommend you start <ulink url=\"http://hibernate.org/152.html\">here</ulink>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:39
+#, no-c-format
+msgid "System Requirements"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:42
+#, no-c-format
+msgid "System requirements"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:47
+#, no-c-format
+msgid "Java Runtime"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:49
+#, no-c-format
+msgid "A JDK or JRE version <emphasis>5</emphasis> or greater. You can download a Java Runtime for Windows/Linux/Solaris <ulink url=\"http://java.sun.com/javase/downloads/\">here</ulink>."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:55
+#, no-c-format
+msgid "Hibernate Search"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:57
+#, no-c-format
+msgid "<literal>hibernate-search.jar</literal> and all runtime dependencies from the <literal>lib</literal> directory of the Hibernate Search distribution. Please refer to <filename>README.txt </filename>in the lib directory to understand which dependencies are required."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:65
+#, no-c-format
+msgid "Hibernate Core"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:67
+#, no-c-format
+msgid "This instructions have been tested against Hibernate 3.3.x. You will need <literal>hibernate-core.jar</literal> and its transitive dependencies from the <literal>lib</literal> directory of the distribution. Refer to <literal>README.txt</literal> in the <literal>lib</literal> directory of the distribution to determine the minimum runtime requirements."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:76
+#, no-c-format
+msgid "Hibernate Annotations"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:78
+#, no-c-format
+msgid "Even though Hibernate Search can be used without Hibernate Annotations the following instructions will use them for basic entity configuration (<emphasis>@Entity, @Id, @OneToMany,...</emphasis>). This part of the configuration could also be expressed in xml or code. However, Hibernate Search itself has its own set of annotations (<emphasis>@Indexed, @DocumentId, @Field,...</emphasis>) for which there exists so far no alternative configuration. The tutorial is tested against version 3.4.x of Hibernate Annotations."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:92
+#, no-c-format
+msgid "You can download all dependencies from the Hibernate <ulink url=\"http://www.hibernate.org/6.html\">download site</ulink>. You can also verify the dependency versions against the <ulink url=\"http://www.hibernate.org/6.html#A3\">Hibernate Compatibility Matrix</ulink>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:100
+#, no-c-format
+msgid "Using Maven"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:102
+#, no-c-format
+msgid "Instead of managing all dependencies manually, maven users have the possibility to use the <ulink url=\"http://repository.jboss.com/maven2\">JBoss maven repository</ulink>. Just add the JBoss repository url to the <emphasis>repositories</emphasis> section of your <filename>pom.xml</filename> or <filename>settings.xml</filename>:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:110
+#, no-c-format
+msgid "Adding the JBoss maven repository to <filename>settings.xml</filename>"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:113
+#, no-c-format
+msgid ""
+ "<repository>\n"
+ " <id>repository.jboss.org</id>\n"
+ " <name>JBoss Maven Repository</name>\n"
+ " <url>http://repository.jboss.org/maven2</url>\n"
+ " <layout>default</layout>\n"
+ "</repository>"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:116
+#, no-c-format
+msgid "Then add the following dependencies to your pom.xml:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:119
+#, no-c-format
+msgid "Maven dependencies for Hibernate Search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:121
+#, no-c-format
+msgid ""
+ "<dependency>\n"
+ " <groupId>org.hibernate</groupId>\n"
+ " <artifactId>hibernate-search</artifactId>\n"
+ " <version>3.1.0.GA</version>\n"
+ "</dependency>\n"
+ "<dependency>\n"
+ " <groupId>org.hibernate</groupId>\n"
+ " <artifactId>hibernate-annotations</artifactId>\n"
+ " <version>3.4.0.GA</version>\n"
+ "</dependency>\n"
+ "<dependency>\n"
+ " <groupId>org.hibernate</groupId>\n"
+ " <artifactId>hibernate-entitymanager</artifactId>\n"
+ " <version>3.4.0.GA</version>\n"
+ "</dependency>\n"
+ "<dependency>\n"
+ " <groupId>org.apache.solr</groupId>\n"
+ " <artifactId>solr-common</artifactId>\n"
+ " <version>1.3.0</version>\n"
+ "</dependency>\n"
+ "<dependency>\n"
+ " <groupId>org.apache.solr</groupId>\n"
+ " <artifactId>solr-core</artifactId>\n"
+ " <version>1.3.0</version>\n"
+ "</dependency>\n"
+ "<dependency>\n"
+ " <groupId>org.apache.lucene</groupId>\n"
+ " <artifactId>lucene-snowball</artifactId>\n"
+ " <version>2.4.0</version>\n"
+ "</dependency>"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:124
+#, no-c-format
+msgid "Not all dependencies are required. Only the <emphasis>hibernate-search</emphasis> dependency is mandatory. This dependency, together with its required transitive dependencies, contain all required classes needed to use Hibernate Search. <emphasis>hibernate-annotations</emphasis> is only needed if you want to use annotations to configure your domain model as we do in this tutorial. However, even if you choose not to use Hibernate Annotations you still have to use the Hibernate Search specific annotations, which are bundled with the hibernate-search jar file, to configure your Lucene index. Currently there is no XML configuration available for Hibernate Search. <emphasis>hibernate-entitymanager</emphasis> is required if you want to use Hibernate Search in conjunction with JPA. The Solr dependencies are needed if you want to utilize Solr's analyzer framework. More about this later. And finally, the <literal>lucene-snowball</literal> dependency is needed if you want to u!
se Lucene's snowball stemmer."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:142
+#, no-c-format
+msgid "Configuration"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:144
+#, no-c-format
+msgid "Once you have downloaded and added all required dependencies to your application you have to add a couple of properties to your hibernate configuration file. If you are using Hibernate directly this can be done in <literal>hibernate.properties</literal> or <literal>hibernate.cfg.xml</literal>. If you are using Hibernate via JPA you can also add the properties to <literal>persistence.xml</literal>. The good news is that for standard use most properties offer a sensible default. An example <filename>persistence.xml</filename> configuration could look like this:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:155
+#, no-c-format
+msgid "Basic configuration options to be added to <literal><filename>hibernate.properties</filename></literal>, <literal><filename>hibernate.cfg.xml</filename></literal> or <filename>persistence.xml</filename>"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:160
+#, no-c-format
+msgid ""
+ "...\n"
+ "<property name=\"hibernate.search.default.directory_provider\" \n"
+ " value=\"org.hibernate.search.store.FSDirectoryProvider\"/> \n"
+ "\n"
+ "<property name=\"hibernate.search.default.indexBase\" value=\"/var/lucene/indexes\"/> \n"
+ "..."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:163
+#, no-c-format
+msgid "First you have to tell Hibernate Search which <classname>DirectoryProvider</classname> to use. This can be achieved by setting the <literal>hibernate.search.default.directory_provider</literal> property. Apache Lucene has the notion of a <literal>Directory</literal> to store the index files. Hibernate Search handles the initialization and configuration of a Lucene <literal>Directory</literal> instance via a <literal>DirectoryProvider</literal>. In this tutorial we will use a subclass of <literal>DirectoryProvider</literal> called <classname>FSDirectoryProvider</classname>. This will give us the ability to physically inspect the Lucene indexes created by Hibernate Search (eg via <ulink url=\"http://www.getopt.org/luke/\">Luke</ulink>). Once you have a working configuration you can start experimenting with other directory providers (see <xref linkend=\"search-configuration-directory\"/>). Next to the directory provider you also have to specify the default root director!
y for all indexes via <literal>hibernate.search.default.indexBase</literal>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:180
+#, no-c-format
+msgid "Lets assume that your application contains the Hibernate managed classes <classname>example.Book</classname> and <classname>example.Author</classname> and you want to add free text search capabilities to your application in order to search the books contained in your database."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:187
+#, no-c-format
+msgid "Example entities Book and Author before adding Hibernate Search specific annotations"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:190
+#, no-c-format
+msgid ""
+ "package example;\n"
+ "...\n"
+ "@Entity\n"
+ "public class Book {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " private Integer id; \n"
+ "\n"
+ " private String title; \n"
+ "\n"
+ " private String subtitle; \n"
+ "\n"
+ " @ManyToMany \n"
+ " private Set<Author> authors = new HashSet<Author>();\n"
+ "\n"
+ " private Date publicationDate;\n"
+ " \n"
+ " public Book() {\n"
+ " } \n"
+ " \n"
+ " // standard getters/setters follow here\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:192
+#, no-c-format
+msgid ""
+ "package example;\n"
+ "...\n"
+ "@Entity\n"
+ "public class Author {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " private Integer id;\n"
+ "\n"
+ " private String name;\n"
+ "\n"
+ " public Author() {\n"
+ " } \n"
+ " \n"
+ " // standard getters/setters follow here\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:195
+#, no-c-format
+msgid "To achieve this you have to add a few annotations to the <classname>Book</classname> and <classname>Author</classname> class. The first annotation <literal>@Indexed</literal> marks <classname>Book</classname> as indexable. By design Hibernate Search needs to store an untokenized id in the index to ensure index unicity for a given entity. <literal>@DocumentId</literal> marks the property to use for this purpose and is in most cases the same as the database primary key. In fact since the 3.1.0 release of Hibernate Search <literal>@DocumentId</literal> is optional in the case where an <classname>@Id</classname> annotation exists."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:206
+#, no-c-format
+msgid "Next you have to mark the fields you want to make searchable. Let's start with <literal>title</literal> and <literal>subtitle</literal> and annotate both with <literal>@Field</literal>. The parameter <literal>index=Index.TOKENIZED</literal> will ensure that the text will be tokenized using the default Lucene analyzer. Usually, tokenizing means chunking a sentence into individual words and potentially excluding common words like <literal>'a'</literal> or '<literal>the</literal>'. We will talk more about analyzers a little later on. The second parameter we specify within <literal>@Field</literal>,<literal> store=Store.NO</literal>, ensures that the actual data will not be stored in the index. Whether this data is stored in the index or not has nothing to do with the ability to search for it. From Lucene's perspective it is not necessary to keep the data once the index is created. The benefit of storing it is the ability to retrieve it via projections (<xref linkend=\"p!
rojections\"/>)."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:222
+#, no-c-format
+msgid "Without projections, Hibernate Search will per default execute a Lucene query in order to find the database identifiers of the entities matching the query critera and use these identifiers to retrieve managed objects from the database. The decision for or against projection has to be made on a case to case basis. The default behaviour - <literal>Store.NO</literal> - is recommended since it returns managed objects whereas projections only return object arrays."
+msgstr ""
+
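As a hedged sketch of the projection alternative mentioned above (it reuses the tutorial's Book entity and assumes an existing Lucene query named luceneQuery; only fields stored with Store.YES, such as publicationDate below, can be projected):

    // returns Object[] rows instead of managed Book instances
    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(luceneQuery, Book.class);
    hibQuery.setProjection("publicationDate"); // projected fields must be stored in the index
    List results = hibQuery.list();            // each element is an Object[]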
+#. Tag: para
+#: getting-started.xml:230
+#, no-c-format
+msgid "After this short look under the hood let's go back to annotating the <classname>Book</classname> class. Another annotation we have not yet discussed is <literal>@DateBridge</literal>. This annotation is one of the built-in field bridges in Hibernate Search. The Lucene index is purely string based. For this reason Hibernate Search must convert the data types of the indexed fields to strings and vice versa. A range of predefined bridges are provided, including the <classname>DateBridge</classname> which will convert a <classname>java.util.Date</classname> into a <classname>String</classname> with the specified resolution. For more details see <xref linkend=\"search-mapping-bridge\"/>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:241
+#, no-c-format
+msgid "This leaves us with <literal>@IndexedEmbedded. </literal>This annotation is used to index associated entities (<literal>@ManyToMany</literal>, <literal>@*ToOne</literal> and <literal>@Embedded</literal>) as part of the owning entity. This is needed since a Lucene index document is a flat data structure which does not know anything about object relations. To ensure that the authors' name wil be searchable you have to make sure that the names are indexed as part of the book itself. On top of <literal>@IndexedEmbedded</literal> you will also have to mark all fields of the associated entity you want to have included in the index with <literal>@Indexed</literal>. For more details see <xref linkend=\"search-mapping-associated\"/>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:253
+#, no-c-format
+msgid "These settings should be sufficient for now. For more details on entity mapping refer to <xref linkend=\"search-mapping-entity\"/>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:257
+#, no-c-format
+msgid "Example entities after adding Hibernate Search annotations"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:260
+#, no-c-format
+msgid ""
+ "package example;\n"
+ "...\n"
+ "@Entity\n"
+ "<emphasis role=\"bold\">@Indexed</emphasis>\n"
+ "public class Book {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " <emphasis role=\"bold\">@DocumentId</emphasis>\n"
+ " private Integer id;\n"
+ " \n"
+ " <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>\n"
+ " private String title;\n"
+ " \n"
+ " <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>\n"
+ " private String subtitle; \n"
+ "\n"
+ " <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+ " @ManyToMany \n"
+ " private Set<Author> authors = new HashSet<Author>();\n"
+ "\n"
+ "<emphasis role=\"bold\"> @Field(index = Index.UN_TOKENIZED, store = Store.YES)\n"
+ " @DateBridge(resolution = Resolution.DAY)</emphasis>\n"
+ " private Date publicationDate;\n"
+ " \n"
+ " public Book() {\n"
+ " } \n"
+ " \n"
+ " // standard getters/setters follow here\n"
+ " ... \n"
+ "}"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:262
+#, no-c-format
+msgid ""
+ "package example;\n"
+ "...\n"
+ "@Entity\n"
+ "public class Author {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " private Integer id;\n"
+ "\n"
+ " <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</emphasis>\n"
+ " private String name;\n"
+ "\n"
+ " public Author() {\n"
+ " } \n"
+ " \n"
+ " // standard getters/setters follow here\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:267
+#, no-c-format
+msgid "Indexing"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:269
+#, no-c-format
+msgid "Hibernate Search will transparently index every entity persisted, updated or removed through Hibernate Core. However, you have to trigger an initial indexing to populate the Lucene index with the data already present in your database. Once you have added the above properties and annotations it is time to trigger an initial batch index of your books. You can achieve this by using one of the following code snippets (see also <xref linkend=\"search-batchindex\"/>):"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:278
+#, no-c-format
+msgid "Using Hibernate Session to index data"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:280
+#, no-c-format
+msgid ""
+ "FullTextSession fullTextSession = Search.getFullTextSession(session);\n"
+ "Transaction tx = fullTextSession.beginTransaction();\n"
+ "\n"
+ "List books = session.createQuery(\"from Book as book\").list();\n"
+ "for (Book book : books) {\n"
+ " <emphasis role=\"bold\">fullTextSession.index(book);</emphasis>\n"
+ "}\n"
+ "\n"
+ "tx.commit(); //index is written at commit time"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:284
+#, no-c-format
+msgid "Using JPA to index data"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:286
+#, no-c-format
+msgid ""
+ "EntityManager em = entityManagerFactory.createEntityManager();\n"
+ "FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(em);\n"
+ "em.getTransaction().begin();\n"
+ "\n"
+ "List books = em.createQuery(\"select book from Book as book\").getResultList();\n"
+ "for (Book book : books) {\n"
+ " <emphasis role=\"bold\">fullTextEntityManager.index(book);</emphasis>\n"
+ "} \n"
+ "\n"
+ "em.getTransaction().commit();\n"
+ "em.close();"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:289
+#, no-c-format
+msgid "After executing the above code, you should be able to see a Lucene index under <literal>/var/lucene/indexes/example.Book</literal>. Go ahead an inspect this index with <ulink url=\"http://www.getopt.org/luke/\">Luke</ulink>. It will help you to understand how Hibernate Search works."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:297
+#, no-c-format
+msgid "Searching"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:299
+#, no-c-format
+msgid "Now it is time to execute a first search. The general approach is to create a native Lucene query and then wrap this query into a org.hibernate.Query in order to get all the functionality one is used to from the Hibernate API. The following code will prepare a query against the indexed fields, execute it and return a list of <classname>Book</classname>s."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:307
+#, no-c-format
+msgid "Using Hibernate Session to create and execute a search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:309
+#, no-c-format
+msgid ""
+ "FullTextSession fullTextSession = Search.getFullTextSession(session);\n"
+ "Transaction tx = fullTextSession.beginTransaction();\n"
+ "\n"
+ "// create native Lucene query\n"
+ "String[] fields = new String[]{\"title\", \"subtitle\", \"authors.name\", \"publicationDate\"};\n"
+ "MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());\n"
+ "org.apache.lucene.search.Query query = parser.parse( \"Java rocks!\" );\n"
+ "\n"
+ "// wrap Lucene query in a org.hibernate.Query\n"
+ "org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(query, Book.class);\n"
+ "\n"
+ "// execute search\n"
+ "List result = hibQuery.list();\n"
+ " \n"
+ "tx.commit();\n"
+ "session.close();"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:313
+#, no-c-format
+msgid "Using JPA to create and execute a search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:315
+#, no-c-format
+msgid ""
+ "EntityManager em = entityManagerFactory.createEntityManager();\n"
+ "FullTextEntityManager fullTextEntityManager = \n"
+ " org.hibernate.hibernate.search.jpa.Search.getFullTextEntityManager(em);\n"
+ "em.getTransaction().begin();\n"
+ "\n"
+ "// create native Lucene query\n"
+ "String[] fields = new String[]{\"title\", \"subtitle\", \"authors.name\", \"publicationDate\"};\n"
+ "MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());\n"
+ "org.apache.lucene.search.Query query = parser.parse( \"Java rocks!\" );\n"
+ "\n"
+ "// wrap Lucene query in a javax.persistence.Query\n"
+ "javax.persistence.Query persistenceQuery = fullTextEntityManager.createFullTextQuery(query, Book.class);\n"
+ "\n"
+ "// execute search\n"
+ "List result = persistenceQuery.getResultList();\n"
+ "\n"
+ "em.getTransaction().commit();\n"
+ "em.close();"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:320
+#, no-c-format
+msgid "Analyzer"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:322
+#, no-c-format
+msgid "Let's make things a little more interesting now. Assume that one of your indexed book entities has the title \"Refactoring: Improving the Design of Existing Code\" and you want to get hits for all of the following queries: \"refactor\", \"refactors\", \"refactored\" and \"refactoring\". In Lucene this can be achieved by choosing an analyzer class which applies word stemming during the indexing <emphasis role=\"bold\">as well as</emphasis> search process. Hibernate Search offers several ways to configure the analyzer to use (see <xref linkend=\"analyzer\"/>):"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:333
+#, no-c-format
+msgid "Setting the <literal>hibernate.search.analyzer</literal> property in the configuration file. The specified class will then be the default analyzer."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:339
+#, no-c-format
+msgid "Setting the <literal><literal>@Analyzer</literal></literal> annotation at the entity level."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:344
+#, no-c-format
+msgid "Setting the <literal>@<literal>Analyzer</literal></literal> annotation at the field level."
+msgstr ""
+
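For the first option in the list above, a minimal sketch of the property; the analyzer class is just one example of a fully qualified analyzer name:

    hibernate.search.analyzer = org.apache.lucene.analysis.standard.StandardAnalyzer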
+#. Tag: para
+#: getting-started.xml:349
+#, no-c-format
+msgid "When using the <literal>@Analyzer</literal> annotation one can either specify the fully qualified classname of the analyzer to use or one can refer to an analyzer definition defined by the <literal>@AnalyzerDef</literal> annotation. In the latter case the Solr analyzer framework with its factories approach is utilized. To find out more about the factory classes available you can either browse the Solr JavaDoc or read the corresponding section on the <ulink url=\"http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters\">Solr Wiki.</ulink> Note that depending on the chosen factory class additional libraries on top of the Solr dependencies might be required. For example, the <classname>PhoneticFilterFactory</classname> depends on <ulink url=\"http://commons.apache.org/codec\">commons-codec</ulink>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:362
+#, no-c-format
+msgid "In the example below a <classname>StandardTokenizerFactory</classname> is used followed by two filter factories, <classname>LowerCaseFilterFactory</classname> and <classname>SnowballPorterFilterFactory</classname>. The standard tokenizer splits words at punctuation characters and hyphens while keeping email addresses and internet hostnames intact. It is a good general purpose tokenizer. The lowercase filter lowercases the letters in each token whereas the snowball filter finally applies language specific stemming."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:372
+#, no-c-format
+msgid "Generally, when using the Solr framework you have to start with a tokenizer followed by an arbitrary number of filters."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:376
+#, no-c-format
+msgid "Using <classname>@AnalyzerDef</classname> and the Solr framework to define and use an analyzer"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:379
+#, no-c-format
+msgid ""
+ "package example;\n"
+ "...\n"
+ "@Entity\n"
+ "@Indexed\n"
+ "<emphasis role=\"bold\">@AnalyzerDef(name = \"customanalyzer\",\n"
+ " tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+ " filters = {\n"
+ " @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+ " @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {\n"
+ " @Parameter(name = \"language\", value = \"English\")\n"
+ " })\n"
+ " })</emphasis>\n"
+ "public class Book {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Integer id;\n"
+ " \n"
+ " @Field(index=Index.TOKENIZED, store=Store.NO)\n"
+ " <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")</emphasis>\n"
+ " private String title;\n"
+ " \n"
+ " @Field(index=Index.TOKENIZED, store=Store.NO)\n"
+ " <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")</emphasis>\n"
+ " private String subtitle; \n"
+ "\n"
+ " @IndexedEmbedded\n"
+ " @ManyToMany \n"
+ " private Set<Author> authors = new HashSet<Author>();\n"
+ "\n"
+ "<emphasis role=\"bold\"> </emphasis> @Field(index = Index.UN_TOKENIZED, store = Store.YES)\n"
+ " @DateBridge(resolution = Resolution.DAY)\n"
+ " private Date publicationDate;\n"
+ " \n"
+ " public Book() {\n"
+ " } \n"
+ " \n"
+ " // standard getters/setters follow here\n"
+ " ... \n"
+ "}"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:384
+#, no-c-format
+msgid "What's next"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:386
+#, no-c-format
+msgid "The above paragraphs hopefully helped you getting an overview of Hibernate Search. Using the maven archetype plugin and the following command you can create an initial runnable maven project structure populated with the example code of this tutorial."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:392
+#, no-c-format
+msgid "Using the Maven archetype to create tutorial sources"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:394
+#, no-c-format
+msgid ""
+ "mvn archetype:create \\ \n"
+ " -DarchetypeGroupId=org.hibernate \\\n"
+ " -DarchetypeArtifactId=hibernate-search-quickstart \\ \n"
+ " -DarchetypeVersion=3.1.0.GA \\\n"
+ " -DgroupId=my.company -DartifactId=quickstart"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:397
+#, no-c-format
+msgid "Using the maven project you can execute the examples, inspect the file system based index and search and retrieve a list of managed objects. Just run <emphasis>mvn package</emphasis> to compile the sources and run the unit tests."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:402
+#, no-c-format
+msgid "The next step after this tutorial is to get more familiar with the overall architecture of Hibernate Search (<xref linkend=\"search-architecture\"/>) and explore the basic features in more detail. Two topics which were only briefly touched in this tutorial were analyzer configuration (<xref linkend=\"analyzer\"/>) and field bridges (<xref linkend=\"search-mapping-bridge\"/>), both important features required for more fine-grained indexing. More advanced topics cover clustering (<xref linkend=\"jms-backend\"/>) and large indexes handling (<xref linkend=\"search-configuration-directory-sharding\"/>)."
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/getting-started.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/lucene-native.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/lucene-native.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/lucene-native.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,260 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: lucene-native.xml:30
+#, no-c-format
+msgid "Advanced features"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:33
+#, no-c-format
+msgid "SearchFactory"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:35
+#, no-c-format
+msgid "The <classname>SearchFactory</classname> object keeps track of the underlying Lucene resources for Hibernate Search, it's also a convenient way to access Lucene natively. The <classname>SearchFactory</classname> can be accessed from a <classname>FullTextSession</classname>:"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:41
+#, no-c-format
+msgid "Accessing the <classname>SearchFactory</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:43
+#, no-c-format
+msgid ""
+ "FullTextSession fullTextSession = Search.getFullTextSession(regularSession);\n"
+ "SearchFactory searchFactory = fullTextSession.getSearchFactory();"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:48
+#, no-c-format
+msgid "Accessing a Lucene Directory"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:50
+#, no-c-format
+msgid "You can always access the Lucene directories through plain Lucene, the Directory structure is in no way different with or without Hibernate Search. However there are some more convenient ways to access a given Directory. The <classname>SearchFactory</classname> keeps track of the <classname>DirectoryProvider</classname>s per indexed class. One directory provider can be shared amongst several indexed classes if the classes share the same underlying index directory. While usually not the case, a given entity can have several <classname>DirectoryProvider</classname>s if the index is sharded (see <xref linkend=\"search-configuration-directory-sharding\"/>)."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:62
+#, no-c-format
+msgid "Accessing the Lucene <classname>Directory</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:64
+#, no-c-format
+msgid ""
+ "DirectoryProvider[] provider = searchFactory.getDirectoryProviders(Order.class);\n"
+ "org.apache.lucene.store.Directory directory = provider[0].getDirectory();"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:67
+#, no-c-format
+msgid "In this example, directory points to the lucene index storing <classname>Order</classname>s information. Note that the obtained Lucene directory must not be closed (this is Hibernate Search responsibility)."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:74
+#, no-c-format
+msgid "Using an IndexReader"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:76
+#, no-c-format
+msgid "Queries in Lucene are executed on an <literal>IndexReader</literal>. Hibernate Search caches all index readers to maximize performance. Your code can access this cached resources, but you have to follow some \"good citizen\" rules."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:82
+#, no-c-format
+msgid "Accessing an <classname>IndexReader</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:84
+#, no-c-format
+msgid ""
+ "DirectoryProvider orderProvider = searchFactory.getDirectoryProviders(Order.class)[0];\n"
+ "DirectoryProvider clientProvider = searchFactory.getDirectoryProviders(Client.class)[0];\n"
+ "\n"
+ "ReaderProvider readerProvider = searchFactory.getReaderProvider();\n"
+ "IndexReader reader = readerProvider.openReader(orderProvider, clientProvider);\n"
+ "\n"
+ "try {\n"
+ " //do read-only operations on the reader\n"
+ "}\n"
+ "finally {\n"
+ " readerProvider.closeReader(reader);\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:87
+#, no-c-format
+msgid "The ReaderProvider (described in <xref linkend=\"search-architecture-readerstrategy\"/>), will open an IndexReader on top of the index(es) referenced by the directory providers. Because this <classname>IndexReader</classname> is shared amongst several clients, you must adhere to the following rules:"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:95
+#, no-c-format
+msgid "Never call indexReader.close(), but always call readerProvider.closeReader(reader), preferably in a finally block."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:101
+#, no-c-format
+msgid "Don't use this <classname>IndexReader</classname> for modification operations (you would get an exception). If you want to use a read/write index reader, open one from the Lucene Directory object."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:108
+#, no-c-format
+msgid "Aside from those rules, you can use the IndexReader freely, especially to do native queries. Using the shared <literal>IndexReader</literal>s will make most queries more efficient."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:115
+#, no-c-format
+msgid "Customizing Lucene's scoring formula"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:117
+#, no-c-format
+msgid "Lucene allows the user to customize its scoring formula by extending <classname>org.apache.lucene.search.Similarity</classname>. The abstract methods defined in this class match the factors of the following formula calculating the score of query q for document d:"
+msgstr ""
+
+#. Tag: emphasis
+#: lucene-native.xml:122
+#, no-c-format
+msgid "score(q,d) = coord(q,d) · queryNorm(q) · ∑<subscript>t in q</subscript> ( tf(t in d) · idf(t)<superscript>2</superscript> · t.getBoost() · norm(t,d) )"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:131
+#, no-c-format
+msgid "Factor"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:133
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:139
+#, no-c-format
+msgid "tf(t ind)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:141
+#, no-c-format
+msgid "Term frequency factor for the term (t) in the document (d)."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:146
+#, no-c-format
+msgid "idf(t)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:148
+#, no-c-format
+msgid "Inverse document frequency of the term."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:152
+#, no-c-format
+msgid "coord(q,d)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:154
+#, no-c-format
+msgid "Score factor based on how many of the query terms are found in the specified document."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:159
+#, no-c-format
+msgid "queryNorm(q)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:161
+#, no-c-format
+msgid "Normalizing factor used to make scores between queries comparable."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:166
+#, no-c-format
+msgid "t.getBoost()"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:168
+#, no-c-format
+msgid "Field boost."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:172
+#, no-c-format
+msgid "norm(t,d)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:174
+#, no-c-format
+msgid "Encapsulates a few (indexing time) boost and length factors."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:178
+#, no-c-format
+msgid "It is beyond the scope of this manual to explain this formula in more detail. Please refer to <classname>Similarity</classname>'s Javadocs for more information."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:183
+#, no-c-format
+msgid ""
+ "Hibernate Search provides two ways to modify Lucene's similarity calculation. First you can set the default similarity by specifying the fully specified classname of your <classname>Similarity</classname> implementation using the property <constant>hibernate.search.similarity</constant>. The default value is <classname>org.apache.lucene.search.DefaultSimilarity</classname>. Additionally you can override the default similarity on class level using the <literal>@Similarity</literal> annotation.<programlisting>@Entity\n"
+ "@Indexed\n"
+ "<emphasis role=\"bold\">@Similarity(impl = DummySimilarity.class)</emphasis>\n"
+ "public class Book {\n"
+ " ...\n"
+ "}</programlisting>As an example, let's assume it is not important how often a term appears in a document. Documents with a single occurrence of the term should be scored the same as documents with multiple occurrences. In this case your custom implementation of the method <methodname>tf(float freq)</methodname> should return 1.0."
+msgstr ""
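As an illustration of the tf() override just described, a minimal sketch of the DummySimilarity class referenced above, assuming it simply extends Lucene's DefaultSimilarity and only changes the term frequency factor:

    public class DummySimilarity extends org.apache.lucene.search.DefaultSimilarity {
        // ignore how often the term appears: a single occurrence scores like many
        @Override
        public float tf(float freq) {
            return 1.0f;
        }
    }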
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/lucene-native.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/mapping.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/mapping.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/mapping.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1637 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 16:41+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: mapping.xml:30
+#, no-c-format
+msgid "Mapping entities to the index structure"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:32
+#, no-c-format
+msgid "All the metadata information needed to index entities is described through annotations. There is no need for xml mapping files. In fact there is currently no xml configuration option available (see <ulink url=\"http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-210\">HSEARCH-210</ulink>). You can still use hibernate mapping files for the basic Hibernate configuration, but the Search specific configuration has to be expressed via annotations."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:41
+#, no-c-format
+msgid "Mapping an entity"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:44
+#, no-c-format
+msgid "Basic mapping"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:46
+#, no-c-format
+msgid "First, we must declare a persistent class as indexable. This is done by annotating the class with <literal>@Indexed</literal> (all entities not annotated with <literal>@Indexed</literal> will be ignored by the indexing process):"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:52
+#, no-c-format
+msgid "Making a class indexable using the <classname>@Indexed</classname> annotation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:55
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "<emphasis role=\"bold\">@Indexed(index=\"indexes/essays\")</emphasis>\n"
+ "public class Essay {\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:58
+#, no-c-format
+msgid "The <literal>index</literal> attribute tells Hibernate what the Lucene directory name is (usually a directory on your file system). It is recommended to define a base directory for all Lucene indexes using the <literal>hibernate.search.default.indexBase</literal> property in your configuration file. Alternatively you can specify a base directory per indexed entity by specifying <literal>hibernate.search.<index>.indexBase, </literal>where <literal><index></literal> is the fully qualified classname of the indexed entity. Each entity instance will be represented by a Lucene <classname>Document</classname> inside the given index (aka Directory)."
+msgstr ""
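For example, a sketch of the two configuration properties described above (the paths and the entity name are illustrative, and the per-entity key follows the fully qualified classname convention stated in the text):

    # shared base directory for all Lucene indexes
    hibernate.search.default.indexBase = /var/lucene/indexes
    # base directory only for the index of the (hypothetical) org.hibernate.example.Essay entity
    hibernate.search.org.hibernate.example.Essay.indexBase = /var/lucene/essays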
+
+#. Tag: para
+#: mapping.xml:70
+#, no-c-format
+msgid "For each property (or attribute) of your entity, you have the ability to describe how it will be indexed. The default (no annotation present) means that the property is completely ignored by the indexing process. <literal>@Field</literal> does declare a property as indexed. When indexing an element to a Lucene document you can specify how it is indexed:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:79
+#, no-c-format
+msgid "<literal>name</literal> : describe under which name, the property should be stored in the Lucene Document. The default value is the property name (following the JavaBeans convention)"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:85
+#, no-c-format
+msgid "<literal>store</literal> : describe whether or not the property is stored in the Lucene index. You can store the value <literal>Store.YES</literal> (consuming more space in the index but allowing projection, see <xref linkend=\"projections\"/> for more information), store it in a compressed way <literal>Store.COMPRESS</literal> (this does consume more CPU), or avoid any storage <literal>Store.NO</literal> (this is the default value). When a property is stored, you can retrieve its original value from the Lucene Document. This is not related to whether the element is indexed or not."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:98
+#, no-c-format
+msgid "index: describe how the element is indexed and the type of information store. The different values are <literal>Index.NO</literal> (no indexing, ie cannot be found by a query), <literal>Index.TOKENIZED</literal> (use an analyzer to process the property), <literal>Index.UN_TOKENIZED</literal> (no analyzer pre-processing), <literal>Index.NO_NORMS</literal> (do not store the normalization data). The default value is <literal>TOKENIZED</literal>."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:109
+#, no-c-format
+msgid "termVector: describes collections of term-frequency pairs. This attribute enables term vectors being stored during indexing so they are available within documents. The default value is TermVector.NO."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:114
+#, no-c-format
+msgid "The different values of this attribute are:"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:120
+#, no-c-format
+msgid "Value"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:122
+#, no-c-format
+msgid "Definition"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:128
+#, no-c-format
+msgid "TermVector.YES"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:130
+#, no-c-format
+msgid "Store the term vectors of each document. This produces two synchronized arrays, one contains document terms and the other contains the term's frequency."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:136
+#, no-c-format
+msgid "TermVector.NO"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:138
+#, no-c-format
+msgid "Do not store term vectors."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:142
+#, no-c-format
+msgid "TermVector.WITH_OFFSETS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:144
+#, no-c-format
+msgid "Store the term vector and token offset information. This is the same as TermVector.YES plus it contains the starting and ending offset position information for the terms."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:151
+#, no-c-format
+msgid "TermVector.WITH_POSITIONS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:153
+#, no-c-format
+msgid "Store the term vector and token position information. This is the same as TermVector.YES plus it contains the ordinal positions of each occurrence of a term in a document."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:160
+#, no-c-format
+msgid "TermVector.WITH_POSITION_OFFSETS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:162
+#, no-c-format
+msgid "Store the term vector, token position and offset information. This is a combination of the YES, WITH_OFFSETS and WITH_POSITIONS."
+msgstr ""
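For instance, a short sketch combining the @Field attributes described above on a single property (the field name and the getter are illustrative):

    @Field(name = "Abstract",
           store = Store.YES,
           index = Index.TOKENIZED,
           termVector = TermVector.WITH_POSITION_OFFSETS)
    public String getSummary() { return summary; }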
+
+#. Tag: para
+#: mapping.xml:172
+#, no-c-format
+msgid "Whether or not you want to store the original data in the index depends on how you wish to use the index query result. For a regular Hibernate Search usage storing is not necessary. However you might want to store some fields to subsequently project them (see <xref linkend=\"projections\"/> for more information)."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:178
+#, no-c-format
+msgid "Whether or not you want to tokenize a property depends on whether you wish to search the element as is, or by the words it contains. It make sense to tokenize a text field, but tokenizing a date field probably not. Note that fields used for sorting must not be tokenized."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:184
+#, no-c-format
+msgid "Finally, the id property of an entity is a special property used by Hibernate Search to ensure index unicity of a given entity. By design, an id has to be stored and must not be tokenized. To mark a property as index id, use the <literal>@DocumentId</literal> annotation. If you are using Hibernate Annotations and you have specified @Id you can omit @DocumentId. The chosen entity id will also be used as document id."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:193
+#, no-c-format
+msgid "Adding <classname>@DocumentId</classname> ad <classname>@Field</classname> annotations to an indexed entity"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:196
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed(index=\"indexes/essays\")\n"
+ "public class Essay {\n"
+ " ...\n"
+ "\n"
+ " @Id\n"
+ " <emphasis role=\"bold\">@DocumentId</emphasis>\n"
+ " public Long getId() { return id; }\n"
+ "\n"
+ " <emphasis role=\"bold\">@Field(name=\"Abstract\", index=Index.TOKENIZED, store=Store.YES)</emphasis>\n"
+ " public String getSummary() { return summary; }\n"
+ "\n"
+ " @Lob\n"
+ " <emphasis role=\"bold\">@Field(index=Index.TOKENIZED)</emphasis>\n"
+ " public String getText() { return text; }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:199
+#, no-c-format
+msgid "The above annotations define an index with three fields: <literal>id</literal> , <literal>Abstract</literal> and <literal>text</literal> . Note that by default the field name is decapitalized, following the JavaBean specification"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:206
+#, no-c-format
+msgid "Mapping properties multiple times"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:208
+#, no-c-format
+msgid "Sometimes one has to map a property multiple times per index, with slightly different indexing strategies. For example, sorting a query by field requires the field to be <literal>UN_TOKENIZED</literal>. If one wants to search by words in this property and still sort it, one need to index it twice - once tokenized and once untokenized. @Fields allows to achieve this goal."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:216
+#, no-c-format
+msgid "Using @Fields to map a property multiple times"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:218
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed(index = \"Book\" )\n"
+ "public class Book {\n"
+ " <emphasis role=\"bold\">@Fields( {</emphasis>\n"
+ " @Field(index = Index.TOKENIZED),\n"
+ " @Field(name = \"summary_forSort\", index = Index.UN_TOKENIZED, store = Store.YES)\n"
+ " <emphasis role=\"bold\">} )</emphasis>\n"
+ " public String getSummary() {\n"
+ " return summary;\n"
+ " }\n"
+ "\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:221
+#, no-c-format
+msgid "The field <literal>summary</literal> is indexed twice, once as <literal>summary</literal> in a tokenized way, and once as <literal>summary_forSort</literal> in an untokenized way. @Field supports 2 attributes useful when @Fields is used:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:228
+#, no-c-format
+msgid "analyzer: defines a @Analyzer annotation per field rather than per property"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:233
+#, no-c-format
+msgid "bridge: defines a @FieldBridge annotation per field rather than per property"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:238
+#, no-c-format
+msgid "See below for more information about analyzers and field bridges."
+msgstr ""
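As a quick sketch of these two attributes, building on the @Fields example above (the "customanalyzer" definition name anticipates the analyzer definitions section below, and SummaryPrefixBridge is a hypothetical bridge class used purely for illustration):

    @Fields( {
        @Field(index = Index.TOKENIZED,
               analyzer = @Analyzer(definition = "customanalyzer")),        // per-field analyzer
        @Field(name = "summary_forSort",
               index = Index.UN_TOKENIZED,
               store = Store.YES,
               bridge = @FieldBridge(impl = SummaryPrefixBridge.class))      // per-field bridge (hypothetical class)
    } )
    public String getSummary() { return summary; }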
+
+#. Tag: title
+#: mapping.xml:243
+#, no-c-format
+msgid "Embedded and associated objects"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:245
+#, no-c-format
+msgid "Associated objects as well as embedded objects can be indexed as part of the root entity index. This is useful if you expect to search a given entity based on properties of associated objects. In the following example the aim is to return places where the associated city is Atlanta (In the Lucene query parser language, it would translate into <code>address.city:Atlanta</code>)."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:253
+#, no-c-format
+msgid "Using @IndexedEmbedded to index associations"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:255
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "public class Place {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Long id;\n"
+ "\n"
+ " @Field( index = Index.TOKENIZED )\n"
+ " private String name;\n"
+ "\n"
+ " @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )\n"
+ " <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+ " private Address address;\n"
+ " ....\n"
+ "}\n"
+ "\n"
+ "@Entity\n"
+ "public class Address {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " private Long id;\n"
+ "\n"
+ " @Field(index=Index.TOKENIZED)\n"
+ " private String street;\n"
+ "\n"
+ " @Field(index=Index.TOKENIZED)\n"
+ " private String city;\n"
+ "\n"
+ " <emphasis role=\"bold\">@ContainedIn</emphasis>\n"
+ " @OneToMany(mappedBy=\"address\")\n"
+ " private Set<Place> places;\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:258
+#, no-c-format
+msgid "In this example, the place fields will be indexed in the <literal>Place</literal> index. The <literal>Place</literal> index documents will also contain the fields <literal>address.id</literal>, <literal>address.street</literal>, and <literal>address.city</literal> which you will be able to query. This is enabled by the <literal>@IndexedEmbedded</literal> annotation."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:265
+#, no-c-format
+msgid "Be careful. Because the data is denormalized in the Lucene index when using the <classname>@IndexedEmbedded</classname> technique, Hibernate Search needs to be aware of any change in the <classname>Place</classname> object and any change in the <classname>Address</classname> object to keep the index up to date. To make sure the <literal><classname>Place</classname></literal> Lucene document is updated when it's <classname>Address</classname> changes, you need to mark the other side of the bidirectional relationship with <classname>@ContainedIn</classname>."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:275
+#, no-c-format
+msgid "<literal>@ContainedIn</literal> is only useful on associations pointing to entities as opposed to embedded (collection of) objects."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:279
+#, no-c-format
+msgid "Let's make our example a bit more complex:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:282
+#, no-c-format
+msgid "Nested usage of <classname>@IndexedEmbedded</classname> and <classname>@ContainedIn</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:285
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "public class Place {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Long id;\n"
+ "\n"
+ " @Field( index = Index.TOKENIZED )\n"
+ " private String name;\n"
+ "\n"
+ " @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )\n"
+ " <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+ " private Address address;\n"
+ " ....\n"
+ "}\n"
+ "\n"
+ "@Entity\n"
+ "public class Address {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " private Long id;\n"
+ "\n"
+ " @Field(index=Index.TOKENIZED)\n"
+ " private String street;\n"
+ "\n"
+ " @Field(index=Index.TOKENIZED)\n"
+ " private String city;\n"
+ "\n"
+ " <emphasis role=\"bold\">@IndexedEmbedded(depth = 1, prefix = \"ownedBy_\")</emphasis>\n"
+ " private Owner ownedBy;\n"
+ "\n"
+ " <emphasis role=\"bold\">@ContainedIn</emphasis>\n"
+ " @OneToMany(mappedBy=\"address\")\n"
+ " private Set<Place> places;\n"
+ " ...\n"
+ "}\n"
+ "\n"
+ "@Embeddable\n"
+ "public class Owner {\n"
+ " @Field(index = Index.TOKENIZED)\n"
+ " private String name;\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:288
+#, no-c-format
+msgid "Any <literal>@*ToMany, @*ToOne</literal> and <literal>@Embedded</literal> attribute can be annotated with <literal>@IndexedEmbedded</literal>. The attributes of the associated class will then be added to the main entity index. In the previous example, the index will contain the following fields"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:296
+#, no-c-format
+msgid "<para>id</para>"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:300
+#, no-c-format
+msgid "name"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:304
+#, no-c-format
+msgid "address.street"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:308
+#, no-c-format
+msgid "address.city"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:312
+#, no-c-format
+msgid "address.ownedBy_name"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:316
+#, no-c-format
+msgid "The default prefix is <literal>propertyName.</literal>, following the traditional object navigation convention. You can override it using the <literal>prefix</literal> attribute as it is shown on the <literal>ownedBy</literal> property."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:322
+#, no-c-format
+msgid "The prefix cannot be set to the empty string."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:325
+#, no-c-format
+msgid "The<literal> depth</literal> property is necessary when the object graph contains a cyclic dependency of classes (not instances). For example, if <classname>Owner</classname> points to <classname>Place</classname>. Hibernate Search will stop including Indexed embedded attributes after reaching the expected depth (or the object graph boundaries are reached). A class having a self reference is an example of cyclic dependency. In our example, because <literal>depth</literal> is set to 1, any <literal>@IndexedEmbedded</literal> attribute in Owner (if any) will be ignored."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:336
+#, no-c-format
+msgid "Using <literal>@IndexedEmbedded</literal> for object associations allows you to express queries such as:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:341
+#, no-c-format
+msgid "Return places where name contains JBoss and where address city is Atlanta. In Lucene query this would be"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:344
+#, no-c-format
+msgid "+name:jboss +address.city:atlanta"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:348
+#, no-c-format
+msgid "Return places where name contains JBoss and where owner's name contain Joe. In Lucene query this would be"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:351
+#, no-c-format
+msgid "+name:jboss +address.orderBy_name:joe"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:355
+#, no-c-format
+msgid "In a way it mimics the relational join operation in a more efficient way (at the cost of data duplication). Remember that, out of the box, Lucene indexes have no notion of association, the join operation is simply non-existent. It might help to keep the relational model normalized while benefiting from the full text index speed and feature richness."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:363
+#, no-c-format
+msgid "An associated object can itself (but does not have to) be <literal>@Indexed</literal>"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:367
+#, no-c-format
+msgid "When @IndexedEmbedded points to an entity, the association has to be directional and the other side has to be annotated <literal>@ContainedIn</literal> (as seen in the previous example). If not, Hibernate Search has no way to update the root index when the associated entity is updated (in our example, a <literal>Place</literal> index document has to be updated when the associated <classname>Address</classname> instance is updated)."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:375
+#, no-c-format
+msgid "Sometimes, the object type annotated by <classname>@IndexedEmbedded</classname> is not the object type targeted by Hibernate and Hibernate Search. This is especially the case when interfaces are used in lieu of their implementation. For this reason you can override the object type targeted by Hibernate Search using the <methodname>targetElement</methodname> parameter."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:383
+#, no-c-format
+msgid "Using the <literal>targetElement</literal> property of <classname>@IndexedEmbedded</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:386
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "public class Address {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Long id;\n"
+ "\n"
+ " @Field(index= Index.TOKENIZED)\n"
+ " private String street;\n"
+ "\n"
+ " @IndexedEmbedded(depth = 1, prefix = \"ownedBy_\", <emphasis role=\"bold\">targetElement = Owner.class</emphasis>)\n"
+ " @Target(Owner.class)\n"
+ " private Person ownedBy;\n"
+ "\n"
+ "\n"
+ " ...\n"
+ "}\n"
+ "\n"
+ "@Embeddable\n"
+ "public class Owner implements Person { ... }"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:391
+#, no-c-format
+msgid "Boost factor"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:393
+#, no-c-format
+msgid "Lucene has the notion of <emphasis>boost factor</emphasis>. It's a way to give more weight to a field or to an indexed element over others during the indexation process. You can use <literal>@Boost</literal> at the @Field, method or class level."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:399
+#, no-c-format
+msgid "Using different ways of increasing the weight of an indexed element using a boost factor"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:402
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed(index=\"indexes/essays\")\n"
+ "<emphasis role=\"bold\">@Boost(1.7f)</emphasis>\n"
+ "public class Essay {\n"
+ " ...\n"
+ "\n"
+ " @Id\n"
+ " @DocumentId\n"
+ " public Long getId() { return id; }\n"
+ "\n"
+ " @Field(name=\"Abstract\", index=Index.TOKENIZED, store=Store.YES, boost=<emphasis\n"
+ " role=\"bold\">@Boost(2f)</emphasis>)\n"
+ " <emphasis role=\"bold\">@Boost(1.5f)</emphasis>\n"
+ " public String getSummary() { return summary; }\n"
+ "\n"
+ " @Lob\n"
+ " @Field(index=Index.TOKENIZED, boost=<emphasis role=\"bold\">@Boost(1.2f)</emphasis>)\n"
+ " public String getText() { return text; }\n"
+ "\n"
+ " @Field\n"
+ " public String getISBN() { return isbn; }\n"
+ "\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:405
+#, no-c-format
+msgid "In our example, <classname>Essay</classname>'s probability to reach the top of the search list will be multiplied by 1.7. The <methodname>summary</methodname> field will be 3.0 (2 * 1.5 - <methodname>@Field.boost</methodname> and <classname>@Boost</classname> on a property are cumulative) more important than the <methodname>isbn</methodname> field. The <methodname>text</methodname> field will be 1.2 times more important than the <methodname>isbn</methodname> field. Note that this explanation in strictest terms is actually wrong, but it is simple and close enough to reality for all practical purposes. Please check the Lucene documentation or the excellent <citetitle>Lucene In Action </citetitle> from Otis Gospodnetic and Erik Hatcher."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:420
+#, no-c-format
+msgid "Analyzer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:422
+#, no-c-format
+msgid "The default analyzer class used to index tokenized fields is configurable through the <literal>hibernate.search.analyzer</literal> property. The default value for this property is <classname>org.apache.lucene.analysis.standard.StandardAnalyzer</classname>."
+msgstr ""
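For instance, a one-line configuration sketch setting the property explicitly to the default named above (any Lucene Analyzer implementation on the classpath could be used instead):

    hibernate.search.analyzer = org.apache.lucene.analysis.standard.StandardAnalyzer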
+
+#. Tag: para
+#: mapping.xml:427
+#, no-c-format
+msgid "You can also define the analyzer class per entity, property and even per @Field (useful when multiple fields are indexed from a single property)."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:432
+#, no-c-format
+msgid "Different ways of specifying an analyzer"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:434
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "<emphasis role=\"bold\">@Analyzer(impl = EntityAnalyzer.class)</emphasis>\n"
+ "public class MyEntity {\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Integer id;\n"
+ "\n"
+ " @Field(index = Index.TOKENIZED)\n"
+ " private String name;\n"
+ "\n"
+ " @Field(index = Index.TOKENIZED)\n"
+ " <emphasis role=\"bold\">@Analyzer(impl = PropertyAnalyzer.class)</emphasis>\n"
+ " private String summary;\n"
+ "\n"
+ " @Field(index = Index.TOKENIZED, <emphasis><emphasis role=\"bold\">analyzer = @Analyzer(impl = FieldAnalyzer.class</emphasis>)</emphasis>\n"
+ " private String body;\n"
+ "\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:437
+#, no-c-format
+msgid "In this example, <classname>EntityAnalyzer</classname> is used to index all tokenized properties (eg. <literal>name</literal>), except <literal>summary</literal> and <literal>body</literal> which are indexed with <classname>PropertyAnalyzer</classname> and <classname>FieldAnalyzer</classname> respectively."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:444
+#, no-c-format
+msgid "Mixing different analyzers in the same entity is most of the time a bad practice. It makes query building more complex and results less predictable (for the novice), especially if you are using a QueryParser (which uses the same analyzer for the whole query). As a rule of thumb, for any given field the same analyzer should be used for indexing and querying."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:453
+#, no-c-format
+msgid "Analyzer definitions"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:455
+#, no-c-format
+msgid "Analyzers can become quite complex to deal with for which reason Hibernate Search introduces the notion of analyzer definitions. An analyzer definition can be reused by many <classname>@Analyzer</classname> declarations. An analyzer definition is composed of:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:463
+#, no-c-format
+msgid "a name: the unique string used to refer to the definition"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:468
+#, no-c-format
+msgid "a tokenizer: responsible for tokenizing the input stream into individual words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:473
+#, no-c-format
+msgid "a list of filters: each filter is responsible to remove, modify or sometimes even add words into the stream provided by the tokenizer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:479
+#, no-c-format
+msgid "This separation of tasks - a tokenizer followed by a list of filters - allows for easy reuse of each individual component and let you build your customized analyzer in a very flexible way (just like Lego). Generally speaking the <classname>Tokenizer</classname> starts the analysis process by turning the character input into tokens which are then further processed by the <classname>TokenFilter</classname>s. Hibernate Search supports this infrastructure by utilizing the Solr analyzer framework. Make sure to add<filename> solr-core.jar and </filename><filename>solr-common.jar</filename> to your classpath to use analyzer definitions. In case you also want to utilizing a snowball stemmer also include the <filename>lucene-snowball.jar.</filename> Other Solr analyzers might depend on more libraries. For example, the <classname>PhoneticFilterFactory</classname> depends on <ulink url=\"http://commons.apache.org/codec\">commons-codec</ulink>. Your distribution of Hibernate Sea!
rch provides these dependencies in its <filename>lib</filename> directory."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:498
+#, no-c-format
+msgid "<classname>@AnalyzerDef</classname> and the Solr framework"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:501
+#, no-c-format
+msgid ""
+ "@AnalyzerDef(name=\"customanalyzer\",\n"
+ " tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+ " filters = {\n"
+ " @TokenFilterDef(factory = ISOLatin1AccentFilterFactory.class),\n"
+ " @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+ " @TokenFilterDef(factory = StopFilterFactory.class, params = {\n"
+ " @Parameter(name=\"words\", value= \"org/hibernate/search/test/analyzer/solr/stoplist.properties\" ),\n"
+ " @Parameter(name=\"ignoreCase\", value=\"true\")\n"
+ " })\n"
+ "})\n"
+ "public class Team {\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:504
+#, no-c-format
+msgid "A tokenizer is defined by its factory which is responsible for building the tokenizer and using the optional list of parameters. This example use the standard tokenizer. A filter is defined by its factory which is responsible for creating the filter instance using the optional parameters. In our example, the StopFilter filter is built reading the dedicated words property file and is expected to ignore case. The list of parameters is dependent on the tokenizer or filter factory."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:514
+#, no-c-format
+msgid "Filters are applied in the order they are defined in the <classname>@AnalyzerDef</classname> annotation. Make sure to think twice about this order."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:519
+#, no-c-format
+msgid "Once defined, an analyzer definition can be reused by an <classname>@Analyzer</classname> declaration using the definition name rather than declaring an implementation class."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:524
+#, no-c-format
+msgid "Referencing an analyzer by name"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:526
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "@AnalyzerDef(name=\"customanalyzer\", ... )\n"
+ "public class Team {\n"
+ " @Id\n"
+ " @DocumentId\n"
+ " @GeneratedValue\n"
+ " private Integer id;\n"
+ "\n"
+ " @Field\n"
+ " private String name;\n"
+ "\n"
+ " @Field\n"
+ " private String location;\n"
+ "\n"
+ " @Field <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")</emphasis>\n"
+ " private String description;\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:529
+#, no-c-format
+msgid "Analyzer instances declared by <classname>@AnalyzerDef</classname> are available by their name in the <classname>SearchFactory</classname>."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:533
+#, no-c-format
+msgid "Analyzer analyzer = fullTextSession.getSearchFactory().getAnalyzer(\"customanalyzer\");"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:535
+#, no-c-format
+msgid "This is quite useful wen building queries. Fields in queries should be analyzed with the same analyzer used to index the field so that they speak a common \"language\": the same tokens are reused between the query and the indexing process. This rule has some exceptions but is true most of the time. Respect it unless you know what you are doing."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:544
+#, no-c-format
+msgid "Available analyzers"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:546
+#, no-c-format
+msgid "Solr and Lucene come with a lot of useful default tokenizers and filters. You can find a complete list of tokenizer factories and filter factories at <ulink url=\"http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters\">http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters</ulink>. Let check a few of them."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:553
+#, no-c-format
+msgid "Some of the available tokenizers"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:558 mapping.xml:593
+#, no-c-format
+msgid "Factory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:560 mapping.xml:595
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:562 mapping.xml:597
+#, no-c-format
+msgid "parameters"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:568
+#, no-c-format
+msgid "StandardTokenizerFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:570
+#, no-c-format
+msgid "Use the Lucene StandardTokenizer"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:572 mapping.xml:581 mapping.xml:607 mapping.xml:615 mapping.xml:647
+#, no-c-format
+msgid "none"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:576
+#, no-c-format
+msgid "HTMLStripStandardTokenizerFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:578
+#, no-c-format
+msgid "Remove HTML tags, keep the text and pass it to a StandardTokenizer"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:588
+#, no-c-format
+msgid "Some of the available filters"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:603
+#, no-c-format
+msgid "StandardFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:605
+#, no-c-format
+msgid "Remove dots from acronyms and 's from words"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:611
+#, no-c-format
+msgid "LowerCaseFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:613
+#, no-c-format
+msgid "Lowercase words"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:619
+#, no-c-format
+msgid "StopFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:621
+#, no-c-format
+msgid "remove words (tokens) matching a list of stop words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:624
+#, no-c-format
+msgid "<literal>words</literal>: points to a resource file containing the stop words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:625
+#, no-c-format
+msgid "ignoreCase: true if <literal>case</literal> should be ignore when comparing stop words, <literal>false</literal> otherwise"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:631
+#, no-c-format
+msgid "SnowballPorterFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:633
+#, no-c-format
+msgid "Reduces a word to it's root in a given language. (eg. protect, protects, protection share the same root). Using such a filter allows searches matching related words."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:637
+#, no-c-format
+msgid "<literal>language</literal>: Danish, Dutch, English, Finnish, French, German, Italian, Norwegian, Portuguese, Russian, Spanish, Swedish and a few more"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:643
+#, no-c-format
+msgid "ISOLatin1AccentFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:645
+#, no-c-format
+msgid "remove accents for languages like French"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:653
+#, no-c-format
+msgid "We recommend to check all the implementations of <classname>org.apache.solr.analysis.TokenizerFactory</classname> and <classname>org.apache.solr.analysis.TokenFilterFactory</classname> in your IDE to see the implementations available."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:660
+#, no-c-format
+msgid "Analyzer discriminator (experimental)"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:662
+#, no-c-format
+msgid "So far all the introduced ways to specify an analyzer were static. However, there are use cases where it is useful to select an analyzer depending on the current state of the entity to be indexed, for example in multilingual application. For an <classname>BlogEntry</classname> class for example the analyzer could depend on the language property of the entry. Depending on this property the correct language specific stemmer should be chosen to index the actual text."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:671
+#, no-c-format
+msgid "To enable this dynamic analyzer selection Hibernate Search introduces the <classname>AnalyzerDiscriminator</classname> annotation. The following example demonstrates the usage of this annotation:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:677
+#, no-c-format
+msgid "Usage of @AnalyzerDiscriminator in order to select an analyzer depending on the entity state"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:680
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "@AnalyzerDefs({\n"
+ " @AnalyzerDef(name = \"en\",\n"
+ " tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+ " filters = {\n"
+ " @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+ " @TokenFilterDef(factory = EnglishPorterFilterFactory.class\n"
+ " )\n"
+ " }),\n"
+ " @AnalyzerDef(name = \"de\",\n"
+ " tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+ " filters = {\n"
+ " @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+ " @TokenFilterDef(factory = GermanStemFilterFactory.class)\n"
+ " })\n"
+ "})\n"
+ "public class BlogEntry {\n"
+ "\n"
+ " @Id\n"
+ " @GeneratedValue\n"
+ " @DocumentId\n"
+ " private Integer id;\n"
+ "\n"
+ " @Field\n"
+ " @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)\n"
+ " private String language;\n"
+ " \n"
+ " @Field\n"
+ " private String text;\n"
+ " \n"
+ " private Set<BlogEntry> references;\n"
+ "\n"
+ " // standard getter/setter\n"
+ " ...\n"
+ "}"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:682
+#, no-c-format
+msgid ""
+ "public class LanguageDiscriminator implements Discriminator {\n"
+ "\n"
+ " public String getAnalyzerDefinitionName(Object value, Object entity, String field) {\n"
+ " if ( value == null || !( entity instanceof Article ) ) {\n"
+ " return null;\n"
+ " }\n"
+ " return (String) value;\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:685
+#, no-c-format
+msgid "The prerequisite for using <classname>@AnalyzerDiscriminator</classname> is that all analyzers which are going to be used are predefined via <classname>@AnalyzerDef</classname> definitions. If this is the case one can place the <classname>@AnalyzerDiscriminator</classname> annotation either on the class or on a specific property of the entity for which to dynamically select an analyzer. Via the <literal>impl</literal> parameter of the <classname>AnalyzerDiscriminator</classname> you specify a concrete implementation of the <classname>Discriminator</classname> interface. It is up to you to provide an implementation for this interface. The only method you have to implement is <classname>getAnalyzerDefinitionName()</classname> which gets called for each field added to the Lucene document. The entity which is getting indexed is also passed to the interface method. The <literal>value</literal> parameter is only set if the <classname>AnalyzerDiscriminator</classname> is pl!
aced on property level instead of class level. In this case the value represents the current value of this property."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:705
+#, no-c-format
+msgid "An implemention of the <classname>Discriminator</classname> interface has to return the name of an existing analyzer definition if the analyzer should be set dynamically or <classname>null</classname> if the default analyzer should not be overridden. The given example assumes that the language parameter is either 'de' or 'en' which matches the specified names in the <classname>@AnalyzerDef</classname>s."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:714
+#, no-c-format
+msgid "The <classname>@AnalyzerDiscriminator</classname> is currently still experimental and the API might still change. We are hoping for some feedback from the community about the usefulness and usability of this feature."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:722
+#, no-c-format
+msgid "Retrieving an analyzer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:724
+#, no-c-format
+msgid "During indexing time, Hibernate Search is using analyzers under the hood for you. In some situations, retrieving analyzers can be handy. If your domain model makes use of multiple analyzers (maybe to benefit from stemming, use phonetic approximation and so on), you need to make sure to use the same analyzers when you build your query."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:732
+#, no-c-format
+msgid "This rule can be broken but you need a good reason for it. If you are unsure, use the same analyzers."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:736
+#, no-c-format
+msgid "You can retrieve the scoped analyzer for a given entity used at indexing time by Hibernate Search. A scoped analyzer is an analyzer which applies the right analyzers depending on the field indexed: multiple analyzers can be defined on a given entity each one working on an individual field, a scoped analyzer unify all these analyzers into a context-aware analyzer. While the theory seems a bit complex, using the right analyzer in a query is very easy."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:745
+#, no-c-format
+msgid "Using the scoped analyzer when building a full-text query"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:748
+#, no-c-format
+msgid ""
+ "org.apache.lucene.queryParser.QueryParser parser = new QueryParser(\n"
+ " \"title\", \n"
+ " fullTextSession.getSearchFactory().getAnalyzer( Song.class )\n"
+ ");\n"
+ "\n"
+ "org.apache.lucene.search.Query luceneQuery = \n"
+ " parser.parse( \"title:sky Or title_stemmed:diamond\" );\n"
+ "\n"
+ "org.hibernate.Query fullTextQuery = \n"
+ " fullTextSession.createFullTextQuery( luceneQuery, Song.class );\n"
+ "\n"
+ "List result = fullTextQuery.list(); //return a list of managed objects"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:751
+#, no-c-format
+msgid "In the example above, the song title is indexed in two fields: the standard analyzer is used in the field <literal>title</literal> and a stemming analyzer is used in the field <literal>title_stemmed</literal>. By using the analyzer provided by the search factory, the query uses the appropriate analyzer depending on the field targeted."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:758
+#, no-c-format
+msgid "If your query targets more that one query and you wish to use your standard analyzer, make sure to describe it using an analyzer definition. You can retrieve analyzers by their definition name using <code>searchFactory.getAnalyzer(String)</code>."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:767
+#, no-c-format
+msgid "Property/Field Bridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:769
+#, no-c-format
+msgid "In Lucene all index fields have to be represented as Strings. For this reason all entity properties annotated with <literal>@Field</literal> have to be indexed in a String form. For most of your properties, Hibernate Search does the translation job for you thanks to a built-in set of bridges. In some cases, though you need a more fine grain control over the translation process."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:777
+#, no-c-format
+msgid "Built-in bridges"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:779
+#, no-c-format
+msgid "Hibernate Search comes bundled with a set of built-in bridges between a Java property type and its full text representation."
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:784
+#, no-c-format
+msgid "null"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:787
+#, no-c-format
+msgid "null elements are not indexed. Lucene does not support null elements and this does not make much sense either."
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:793
+#, no-c-format
+msgid "java.lang.String"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:796
+#, no-c-format
+msgid "String are indexed as is"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:801
+#, no-c-format
+msgid "short, Short, integer, Integer, long, Long, float, Float, double, Double, BigInteger, BigDecimal"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:805
+#, no-c-format
+msgid "Numbers are converted in their String representation. Note that numbers cannot be compared by Lucene (ie used in ranged queries) out of the box: they have to be padded"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:808
+#, no-c-format
+msgid "Using a Range query is debatable and has drawbacks, an alternative approach is to use a Filter query which will filter the result query to the appropriate range."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:812
+#, no-c-format
+msgid "Hibernate Search will support a padding mechanism"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:818
+#, no-c-format
+msgid "java.util.Date"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:821
+#, no-c-format
+msgid "Dates are stored as yyyyMMddHHmmssSSS in GMT time (200611072203012 for Nov 7th of 2006 4:03PM and 12ms EST). You shouldn't really bother with the internal format. What is important is that when using a DateRange Query, you should know that the dates have to be expressed in GMT time."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:827
+#, no-c-format
+msgid "Usually, storing the date up to the millisecond is not necessary. <literal>@DateBridge</literal> defines the appropriate resolution you are willing to store in the index ( <literal> <literal>@DateBridge(resolution=Resolution.DAY)</literal> </literal> ). The date pattern will then be truncated accordingly."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:834
+#, no-c-format
+msgid ""
+ "@Entity \n"
+ "@Indexed\n"
+ "public class Meeting {\n"
+ " @Field(index=Index.UN_TOKENIZED)\n"
+ " <emphasis role=\"bold\">@DateBridge(resolution=Resolution.MINUTE)</emphasis>\n"
+ " private Date date;\n"
+ " ..."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:837
+#, no-c-format
+msgid "A Date whose resolution is lower than <literal>MILLISECOND</literal> cannot be a <literal>@DocumentId</literal>"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:845
+#, no-c-format
+msgid "java.net.URI, java.net.URL"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:848
+#, no-c-format
+msgid "URI and URL are converted to their string representation"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:854
+#, no-c-format
+msgid "java.lang.Class"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:857
+#, no-c-format
+msgid "Class are converted to their fully qualified class name. The thread context classloader is used when the class is rehydrated"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:866
+#, no-c-format
+msgid "Custom Bridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:868
+#, no-c-format
+msgid "Sometimes, the built-in bridges of Hibernate Search do not cover some of your property types, or the String representation used by the bridge does not meet your requirements. The following paragraphs describe several solutions to this problem."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:874
+#, no-c-format
+msgid "StringBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:876
+#, no-c-format
+msgid "The simplest custom solution is to give Hibernate Search an implementation of your expected <emphasis><classname>Object</classname> </emphasis>to <classname>String</classname> bridge. To do so you need to implements the <literal>org.hibernate.search.bridge.StringBridge</literal> interface. All implementations have to be thread-safe as they are used concurrently."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:885
+#, no-c-format
+msgid "Implementing your own <classname>StringBridge</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:888
+#, no-c-format
+msgid ""
+ "/**\n"
+ " * Padding Integer bridge.\n"
+ " * All numbers will be padded with 0 to match 5 digits\n"
+ " *\n"
+ " * @author Emmanuel Bernard\n"
+ " */\n"
+ "public class PaddedIntegerBridge implements <emphasis role=\"bold\">StringBridge</emphasis> {\n"
+ "\n"
+ " private int PADDING = 5;\n"
+ "\n"
+ " <emphasis role=\"bold\">public String objectToString(Object object)</emphasis> {\n"
+ " String rawInteger = ( (Integer) object ).toString();\n"
+ " if (rawInteger.length() > PADDING) \n"
+ " throw new IllegalArgumentException( \"Try to pad on a number too big\" );\n"
+ " StringBuilder paddedInteger = new StringBuilder( );\n"
+ " for ( int padIndex = rawInteger.length() ; padIndex < PADDING ; padIndex++ ) {\n"
+ " paddedInteger.append('0');\n"
+ " }\n"
+ " return paddedInteger.append( rawInteger ).toString();\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:891
+#, no-c-format
+msgid "Then any property or field can use this bridge thanks to the <literal>@FieldBridge</literal> annotation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:894
+#, no-c-format
+msgid ""
+ "<emphasis role=\"bold\">@FieldBridge(impl = PaddedIntegerBridge.class)</emphasis>\n"
+ "private Integer length;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:896
+#, no-c-format
+msgid "Parameters can be passed to the Bridge implementation making it more flexible. The Bridge implementation implements a <classname>ParameterizedBridge</classname> interface, and the parameters are passed through the <literal>@FieldBridge</literal> annotation."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:903
+#, no-c-format
+msgid "Passing parameters to your bridge implementation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:905
+#, no-c-format
+msgid ""
+ "public class PaddedIntegerBridge implements StringBridge, <emphasis\n"
+ " role=\"bold\">ParameterizedBridge</emphasis> {\n"
+ "\n"
+ " public static String PADDING_PROPERTY = \"padding\";\n"
+ " private int padding = 5; //default\n"
+ "\n"
+ " <emphasis role=\"bold\">public void setParameterValues(Map parameters)</emphasis> {\n"
+ " Object padding = parameters.get( PADDING_PROPERTY );\n"
+ " if (padding != null) this.padding = (Integer) padding;\n"
+ " }\n"
+ "\n"
+ " public String objectToString(Object object) {\n"
+ " String rawInteger = ( (Integer) object ).toString();\n"
+ " if (rawInteger.length() > padding) \n"
+ " throw new IllegalArgumentException( \"Try to pad on a number too big\" );\n"
+ " StringBuilder paddedInteger = new StringBuilder( );\n"
+ " for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {\n"
+ " paddedInteger.append('0');\n"
+ " }\n"
+ " return paddedInteger.append( rawInteger ).toString();\n"
+ " }\n"
+ "}\n"
+ "\n"
+ "\n"
+ "//property\n"
+ "@FieldBridge(impl = PaddedIntegerBridge.class,\n"
+ " <emphasis role=\"bold\">params = @Parameter(name=\"padding\", value=\"10\")</emphasis>\n"
+ " )\n"
+ "private Integer length;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:908
+#, no-c-format
+msgid "The <classname>ParameterizedBridge</classname> interface can be implemented by <classname>StringBridge</classname>, <classname>TwoWayStringBridge</classname>, <classname>FieldBridge</classname> implementations."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:913
+#, no-c-format
+msgid "All implementations have to be thread-safe, but the parameters are set during initialization and no special care is required at this stage."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:917
+#, no-c-format
+msgid "If you expect to use your bridge implementation on an id property (ie annotated with <literal>@DocumentId</literal> ), you need to use a slightly extended version of <literal>StringBridge</literal> named <classname>TwoWayStringBridge</classname>. Hibernate Search needs to read the string representation of the identifier and generate the object out of it. There is not difference in the way the <literal>@FieldBridge</literal> annotation is used."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:926
+#, no-c-format
+msgid "Implementing a TwoWayStringBridge which can for example be used for id properties"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:929
+#, no-c-format
+msgid ""
+ "public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {\n"
+ "\n"
+ " public static String PADDING_PROPERTY = \"padding\";\n"
+ " private int padding = 5; //default\n"
+ "\n"
+ " public void setParameterValues(Map parameters) {\n"
+ " Object padding = parameters.get( PADDING_PROPERTY );\n"
+ " if (padding != null) this.padding = (Integer) padding;\n"
+ " }\n"
+ "\n"
+ " public String objectToString(Object object) {\n"
+ " String rawInteger = ( (Integer) object ).toString();\n"
+ " if (rawInteger.length() > padding) \n"
+ " throw new IllegalArgumentException( \"Try to pad on a number too big\" );\n"
+ " StringBuilder paddedInteger = new StringBuilder( );\n"
+ " for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {\n"
+ " paddedInteger.append('0');\n"
+ " }\n"
+ " return paddedInteger.append( rawInteger ).toString();\n"
+ " }\n"
+ "\n"
+ " <emphasis role=\"bold\">public Object stringToObject(String stringValue)</emphasis> {\n"
+ " return new Integer(stringValue);\n"
+ " }\n"
+ "}\n"
+ "\n"
+ "\n"
+ "//id property\n"
+ "@DocumentId\n"
+ "@FieldBridge(impl = PaddedIntegerBridge.class,\n"
+ " params = @Parameter(name=\"padding\", value=\"10\") \n"
+ "private Integer id;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:932
+#, no-c-format
+msgid "It is critically important for the two-way process to be idempotent (ie object = stringToObject( objectToString( object ) ) )."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:938
+#, no-c-format
+msgid "FieldBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:940
+#, no-c-format
+msgid "Some use cases require more than a simple object to string translation when mapping a property to a Lucene index. To give you the greatest possible flexibility you can also implement a bridge as a <classname>FieldBridge</classname>. This interface gives you a property value and let you map it the way you want in your Lucene <classname>Document</classname>.The interface is very similar in its concept to the Hibernate<classname> UserType</classname>s."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:948
+#, no-c-format
+msgid "You can for example store a given property in two different document fields:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:952
+#, no-c-format
+msgid "Implementing the FieldBridge interface in order to a given property into multiple document fields"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:955
+#, no-c-format
+msgid ""
+ "/**\n"
+ " * Store the date in 3 different fields - year, month, day - to ease Range Query per\n"
+ " * year, month or day (eg get all the elements of December for the last 5 years).\n"
+ " * \n"
+ " * @author Emmanuel Bernard\n"
+ " */\n"
+ "public class DateSplitBridge implements FieldBridge {\n"
+ " private final static TimeZone GMT = TimeZone.getTimeZone(\"GMT\");\n"
+ "\n"
+ " <emphasis role=\"bold\">public void set(String name, Object value, Document document, \n"
+ " LuceneOptions luceneOptions)</emphasis> {\n"
+ " Date date = (Date) value;\n"
+ " Calendar cal = GregorianCalendar.getInstance(GMT);\n"
+ " cal.setTime(date);\n"
+ " int year = cal.get(Calendar.YEAR);\n"
+ " int month = cal.get(Calendar.MONTH) + 1;\n"
+ " int day = cal.get(Calendar.DAY_OF_MONTH);\n"
+ " \n"
+ " // set year\n"
+ " Field field = new Field(name + \".year\", String.valueOf(year),\n"
+ " luceneOptions.getStore(), luceneOptions.getIndex(),\n"
+ " luceneOptions.getTermVector());\n"
+ " field.setBoost(luceneOptions.getBoost());\n"
+ " document.add(field);\n"
+ " \n"
+ " // set month and pad it if needed\n"
+ " field = new Field(name + \".month\", month < 10 ? \"0\" : \"\"\n"
+ " + String.valueOf(month), luceneOptions.getStore(),\n"
+ " luceneOptions.getIndex(), luceneOptions.getTermVector());\n"
+ " field.setBoost(luceneOptions.getBoost());\n"
+ " document.add(field);\n"
+ " \n"
+ " // set day and pad it if needed\n"
+ " field = new Field(name + \".day\", day < 10 ? \"0\" : \"\"\n"
+ " + String.valueOf(day), luceneOptions.getStore(),\n"
+ " luceneOptions.getIndex(), luceneOptions.getTermVector());\n"
+ " field.setBoost(luceneOptions.getBoost());\n"
+ " document.add(field);\n"
+ " }\n"
+ "}\n"
+ "\n"
+ "//property\n"
+ "<emphasis role=\"bold\">@FieldBridge(impl = DateSplitBridge.class)</emphasis>\n"
+ "private Date date;"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:960
+#, no-c-format
+msgid "ClassBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:962
+#, no-c-format
+msgid "It is sometimes useful to combine more than one property of a given entity and index this combination in a specific way into the Lucene index. The <classname>@ClassBridge</classname> and <classname>@ClassBridge</classname> annotations can be defined at the class level (as opposed to the property level). In this case the custom field bridge implementation receives the entity instance as the value parameter instead of a particular property. Though not shown in this example, <classname>@ClassBridge</classname> supports the <methodname>termVector</methodname> attribute discussed in section <xref linkend=\"basic-mapping\"/>."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:974
+#, no-c-format
+msgid "Implementing a class bridge"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:976
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "<emphasis role=\"bold\">@ClassBridge</emphasis>(name=\"branchnetwork\",\n"
+ " index=Index.TOKENIZED,\n"
+ " store=Store.YES,\n"
+ " impl = <emphasis role=\"bold\">CatFieldsClassBridge.class</emphasis>,\n"
+ " params = @Parameter( name=\"sepChar\", value=\" \" ) )\n"
+ "public class Department {\n"
+ " private int id;\n"
+ " private String network;\n"
+ " private String branchHead;\n"
+ " private String branch;\n"
+ " private Integer maxEmployees\n"
+ " ...\n"
+ "}\n"
+ "\n"
+ "\n"
+ "public class CatFieldsClassBridge implements FieldBridge, ParameterizedBridge {\n"
+ " private String sepChar;\n"
+ "\n"
+ " public void setParameterValues(Map parameters) {\n"
+ " this.sepChar = (String) parameters.get( \"sepChar\" );\n"
+ " }\n"
+ "\n"
+ " <emphasis role=\"bold\">public void set(String name, Object value, Document document, LuceneOptions luceneOptions)</emphasis> {\n"
+ " // In this particular class the name of the new field was passed\n"
+ " // from the name field of the ClassBridge Annotation. This is not\n"
+ " // a requirement. It just works that way in this instance. The\n"
+ " // actual name could be supplied by hard coding it below.\n"
+ " Department dep = (Department) value;\n"
+ " String fieldValue1 = dep.getBranch();\n"
+ " if ( fieldValue1 == null ) {\n"
+ " fieldValue1 = \"\";\n"
+ " }\n"
+ " String fieldValue2 = dep.getNetwork();\n"
+ " if ( fieldValue2 == null ) {\n"
+ " fieldValue2 = \"\";\n"
+ " }\n"
+ " String fieldValue = fieldValue1 + sepChar + fieldValue2;\n"
+ " Field field = new Field( name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );\n"
+ " field.setBoost( luceneOptions.getBoost() );\n"
+ " document.add( field );\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:979
+#, no-c-format
+msgid "In this example, the particular <classname>CatFieldsClassBridge</classname> is applied to the <literal>department</literal> instance, the field bridge then concatenate both branch and network and index the concatenation."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:989 mapping.xml:1014
+#, no-c-format
+msgid "Providing your own id"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:992
+#, no-c-format
+msgid "This part of the documentation is a work in progress."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:995
+#, no-c-format
+msgid "You can provide your own id for Hibernate Search if you are extending the internals. You will have to generate a unique value so it can be given to Lucene to be indexed. This will have to be given to Hibernate Search when you create an org.hibernate.search.Work object - the document id is required in the constructor."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:1002
+#, no-c-format
+msgid "The ProvidedId annotation"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:1004
+#, no-c-format
+msgid "Unlike conventional Hibernate Search API and @DocumentId, this annotation is used on the class and not a field. You also can provide your own bridge implementation when you put in this annotation by calling the bridge() which is on @ProvidedId. Also, if you annotate a class with @ProvidedId, your subclasses will also get the annotation - but it is not done by using the java.lang.annotations.@Inherited. Be sure however, to <emphasis>not</emphasis> use this annotation with @DocumentId as your system will break."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:1016
+#, no-c-format
+msgid ""
+ "@ProvidedId (bridge = org.my.own.package.MyCustomBridge)\n"
+ "@Indexed\n"
+ "public class MyClass{\n"
+ " @Field\n"
+ " String MyString;\n"
+ " ...\n"
+ "}"
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/mapping.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/optimize.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/optimize.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/optimize.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,175 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: optimize.xml:30
+#, no-c-format
+msgid "Index Optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:32
+#, no-c-format
+msgid "From time to time, the Lucene index needs to be optimized. The process is essentially a defragmentation. Until an optimization is triggered Lucene only marks deleted documents as such, no physical deletions are applied. During the optimization process the deletions will be applied which also effects the number of files in the Lucene Directory."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:38
+#, no-c-format
+msgid "Optimizing the Lucene index speeds up searches but has no effect on the indexation (update) performance. During an optimization, searches can be performed, but will most likely be slowed down. All index updates will be stopped. It is recommended to schedule optimization:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:45
+#, no-c-format
+msgid "on an idle system or when the searches are less frequent"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:49
+#, no-c-format
+msgid "after a lot of index modifications"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:54
+#, no-c-format
+msgid "Automatic optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:56
+#, no-c-format
+msgid "Hibernate Search can automatically optimize an index after:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:60
+#, no-c-format
+msgid "a certain amount of operations (insertion, deletion)"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:64
+#, no-c-format
+msgid "or a certain amount of transactions"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:68
+#, no-c-format
+msgid "The configuration for automatic index optimization can be defined on a global level or per index:"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:72
+#, no-c-format
+msgid "Defining automatic optimization parameters"
+msgstr ""
+
+#. Tag: programlisting
+#: optimize.xml:74
+#, no-c-format
+msgid ""
+ "hibernate.search.default.optimizer.operation_limit.max = 1000\n"
+ "hibernate.search.default.optimizer.transaction_limit.max = 100\n"
+ "hibernate.search.Animal.optimizer.transaction_limit.max = 50"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:77
+#, no-c-format
+msgid "An optimization will be triggered to the <literal>Animal</literal> index as soon as either:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:82
+#, no-c-format
+msgid "the number of additions and deletions reaches 1000"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:86
+#, no-c-format
+msgid "the number of transactions reaches 50 (<constant>hibernate.search.Animal.optimizer.transaction_limit.max</constant> having priority over <constant>hibernate.search.default.optimizer.transaction_limit.max</constant>)"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:93
+#, no-c-format
+msgid "If none of these parameters are defined, no optimization is processed automatically."
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:98
+#, no-c-format
+msgid "Manual optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:100
+#, no-c-format
+msgid "You can programmatically optimize (defragment) a Lucene index from Hibernate Search through the <classname>SearchFactory</classname>:"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:104
+#, no-c-format
+msgid "Programmatic index optimization"
+msgstr ""
+
+#. Tag: programlisting
+#: optimize.xml:106
+#, no-c-format
+msgid ""
+ "FullTextSession fullTextSession = Search.getFullTextSession(regularSession);\n"
+ "SearchFactory searchFactory = fullTextSession.getSearchFactory();\n"
+ "\n"
+ "searchFactory.optimize(Order.class);\n"
+ "// or\n"
+ "searchFactory.optimize();"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:109
+#, no-c-format
+msgid "The first example optimizes the Lucene index holding <classname>Order</classname>s; the second, optimizes all indexes."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:113
+#, no-c-format
+msgid "<literal>searchFactory.optimize()</literal> has no effect on a JMS backend. You must apply the optimize operation on the Master node."
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:120
+#, no-c-format
+msgid "Adjusting optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:122
+#, no-c-format
+msgid "Apache Lucene has a few parameters to influence how optimization is performed. Hibernate Search exposes those parameters."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:125
+#, no-c-format
+msgid "Further index optimization parameters include: <itemizedlist> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_buffered_docs</literal> </listitem> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_field_length</literal> </listitem> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].max_merge_docs</literal> </listitem> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].merge_factor</literal> </listitem> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].ram_buffer_size</literal> </listitem> <listitem> <literal>hibernate.search.[default|<indexname>].indexwriter.[batch|transaction].term_index_interval</literal> </listitem> </itemizedlist> See <xref linkend=\"lucene-indexing-performance\"/> for more details."
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/optimize.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/pot/modules/query.pot
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/pot/modules/query.pot (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/pot/modules/query.pot 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1048 @@
+# SOME DESCRIPTIVE TITLE.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <kde-i18n-doc(a)kde.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: application/x-xml2pot; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: query.xml:30
+#, no-c-format
+msgid "Querying"
+msgstr ""
+
+#. Tag: para
+#: query.xml:32
+#, no-c-format
+msgid "The second most important capability of Hibernate Search is the ability to execute a Lucene query and retrieve entities managed by an Hibernate session, providing the power of Lucene without leaving the Hibernate paradigm, and giving another dimension to the Hibernate classic search mechanisms (HQL, Criteria query, native SQL query). Preparing and executing a query consists of four simple steps:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:41
+#, no-c-format
+msgid "Creating a <classname>FullTextSession</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:45
+#, no-c-format
+msgid "<para>Creating a Lucene query</para>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:49
+#, no-c-format
+msgid "Wrapping the Lucene query using a <classname>org.hibernate.Query</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:54
+#, no-c-format
+msgid "Executing the search by calling for example <methodname>list()</methodname> or <methodname>scroll()</methodname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:60
+#, no-c-format
+msgid "To access the querying facilities, you have to use an <classname>FullTextSession</classname>. This Search specific session wraps a regular <classname>org.hibernate.Session</classname> to provide query and indexing capabilities."
+msgstr ""
+
+#. Tag: title
+#: query.xml:66
+#, no-c-format
+msgid "Creating a FullTextSession"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:68
+#, no-c-format
+msgid ""
+ "Session session = sessionFactory.openSession();\n"
+ "...\n"
+ "FullTextSession fullTextSession = Search.getFullTextSession(session);"
+msgstr ""
+
+#. Tag: para
+#: query.xml:71
+#, no-c-format
+msgid "The actual search facility is built on native Lucene queries which the following example illustrates."
+msgstr ""
+
+#. Tag: title
+#: query.xml:75
+#, no-c-format
+msgid "<title>Creating a Lucene query</title>"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:77
+#, no-c-format
+msgid ""
+ "org.apache.lucene.queryParser.QueryParser parser = \n"
+ " new QueryParser(\"title\", new StopAnalyzer() );\n"
+ "\n"
+ "org.apache.lucene.search.Query luceneQuery = parser.parse( \"summary:Festina Or brand:Seiko\" );\n"
+ "<emphasis role=\"bold\">org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );\n"
+ " </emphasis>\n"
+ "List result = fullTextQuery.list(); //return a list of managed objects"
+msgstr ""
+
+#. Tag: para
+#: query.xml:80
+#, no-c-format
+msgid "The Hibernate query built on top of the Lucene query is a regular <literal>org.hibernate.Query</literal>, which means you are in the same paradigm as the other Hibernate query facilities (HQL, Native or Criteria). The regular <literal>list()</literal> , <literal>uniqueResult()</literal>, <literal>iterate()</literal> and <literal>scroll()</literal> methods can be used."
+msgstr ""
+
+#. Tag: para
+#: query.xml:87
+#, no-c-format
+msgid "In case you are using the Java Persistence APIs of Hibernate (aka EJB 3.0 Persistence), the same extensions exist:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:91
+#, no-c-format
+msgid "Creating a Search query using the JPA API"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:93
+#, no-c-format
+msgid ""
+ "EntityManager em = entityManagerFactory.createEntityManager();\n"
+ "\n"
+ "FullTextEntityManager fullTextEntityManager = \n"
+ " org.hibernate.hibernate.search.jpa.Search.getFullTextEntityManager(em);\n"
+ "\n"
+ "...\n"
+ "org.apache.lucene.queryParser.QueryParser parser = \n"
+ " new QueryParser(\"title\", new StopAnalyzer() );\n"
+ "\n"
+ "org.apache.lucene.search.Query luceneQuery = parser.parse( \"summary:Festina Or brand:Seiko\" );\n"
+ "<emphasis role=\"bold\">javax.persistence.Query fullTextQuery = fullTextEntityManager.createFullTextQuery( luceneQuery );</emphasis>\n"
+ "\n"
+ "List result = fullTextQuery.getResultList(); //return a list of managed objects"
+msgstr ""
+
+#. Tag: para
+#: query.xml:96
+#, no-c-format
+msgid "The following examples we will use the Hibernate APIs but the same example can be easily rewritten with the Java Persistence API by just adjusting the way the <classname>FullTextQuery</classname> is retrieved."
+msgstr ""
+
+#. Tag: title
+#: query.xml:102
+#, no-c-format
+msgid "Building queries"
+msgstr ""
+
+#. Tag: para
+#: query.xml:104
+#, no-c-format
+msgid "Hibernate Search queries are built on top of Lucene queries which gives you total freedom on the type of Lucene query you want to execute. However, once built, Hibernate Search wraps further query processing using <classname>org.hibernate.Query</classname> as your primary query manipulation API."
+msgstr ""
+
+#. Tag: title
+#: query.xml:111
+#, no-c-format
+msgid "Building a Lucene query"
+msgstr ""
+
+#. Tag: para
+#: query.xml:113
+#, no-c-format
+msgid "It is out of the scope of this documentation on how to exactly build a Lucene query. Please refer to the online Lucene documentation or get hold of a copy of either Lucene In Action or Hibernate Search in Action."
+msgstr ""
+
+#. Tag: title
+#: query.xml:120
+#, no-c-format
+msgid "Building a Hibernate Search query"
+msgstr ""
+
+#. Tag: title
+#: query.xml:123
+#, no-c-format
+msgid "Generality"
+msgstr ""
+
+#. Tag: para
+#: query.xml:125
+#, no-c-format
+msgid "Once the Lucene query is built, it needs to be wrapped into an Hibernate Query."
+msgstr ""
+
+#. Tag: title
+#: query.xml:129
+#, no-c-format
+msgid "Wrapping a Lucene query into a Hibernate Query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:131
+#, no-c-format
+msgid ""
+ "FullTextSession fullTextSession = Search.getFullTextSession( session );\n"
+ "org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:134
+#, no-c-format
+msgid "If not specified otherwise, the query will be executed against all indexed entities, potentially returning all types of indexed classes. It is advised, from a performance point of view, to restrict the returned types:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:140
+#, no-c-format
+msgid "Filtering the search result by entity type"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:142
+#, no-c-format
+msgid ""
+ "org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Customer.class );\n"
+ "// or\n"
+ "fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Item.class, Actor.class );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:145
+#, no-c-format
+msgid "The first example returns only matching <classname>Customer</classname>s, the second returns matching <classname>Actor</classname>s and <classname>Item</classname>s. The type restriction is fully polymorphic which means that if there are two indexed subclasses <classname>Salesman</classname> and <classname>Customer</classname> of the baseclass <classname>Person</classname>, it is possible to just specify <classname>Person.class</classname> in order to filter on result types."
+msgstr ""
+
+#. Tag: title
+#: query.xml:157
+#, no-c-format
+msgid "Pagination"
+msgstr ""
+
+#. Tag: para
+#: query.xml:159
+#, no-c-format
+msgid "Out of performance reasons it is recommended to restrict the number of returned objects per query. In fact is a very common use case anyway that the user navigates from one page to an other. The way to define pagination is exactly the way you would define pagination in a plain HQL or Criteria query."
+msgstr ""
+
+#. Tag: title
+#: query.xml:166
+#, no-c-format
+msgid "Defining pagination for a search query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:168
+#, no-c-format
+msgid ""
+ "org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Customer.class );\n"
+ "fullTextQuery.setFirstResult(15); //start from the 15th element\n"
+ "fullTextQuery.setMaxResults(10); //return 10 elements"
+msgstr ""
+
+#. Tag: para
+#: query.xml:172
+#, no-c-format
+msgid "It is still possible to get the total number of matching elements regardless of the pagination via <methodname>fulltextQuery.</methodname><methodname>getResultSize()</methodname>"
+msgstr ""
+
+#. Tag: title
+#: query.xml:179
+#, no-c-format
+msgid "Sorting"
+msgstr ""
+
+#. Tag: para
+#: query.xml:181
+#, no-c-format
+msgid "Apache Lucene provides a very flexible and powerful way to sort results. While the default sorting (by relevance) is appropriate most of the time, it can be interesting to sort by one or several other properties. In order to do so set the Lucene Sort object to apply a Lucene sorting strategy."
+msgstr ""
+
+#. Tag: title
+#: query.xml:188
+#, no-c-format
+msgid "Specifying a Lucene <classname>Sort</classname> in order to sort the results"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:191
+#, no-c-format
+msgid ""
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( query, Book.class );\n"
+ "org.apache.lucene.search.Sort sort = new Sort(new SortField(\"title\"));\n"
+ "<emphasis role=\"bold\">query.setSort(sort);</emphasis>\n"
+ "List results = query.list();"
+msgstr ""
+
+#. Tag: para
+#: query.xml:194
+#, no-c-format
+msgid "One can notice the <classname>FullTextQuery</classname> interface which is a sub interface of <classname>org.hibernate.Query</classname>. Be aware that fields used for sorting must not be tokenized."
+msgstr ""
+
+#. Tag: title
+#: query.xml:201
+#, no-c-format
+msgid "Fetching strategy"
+msgstr ""
+
+#. Tag: para
+#: query.xml:203
+#, no-c-format
+msgid "When you restrict the return types to one class, Hibernate Search loads the objects using a single query. It also respects the static fetching strategy defined in your domain model."
+msgstr ""
+
+#. Tag: para
+#: query.xml:207
+#, no-c-format
+msgid "It is often useful, however, to refine the fetching strategy for a specific use case."
+msgstr ""
+
+#. Tag: title
+#: query.xml:211
+#, no-c-format
+msgid "Specifying <classname>FetchMode</classname> on a query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:214
+#, no-c-format
+msgid ""
+ "Criteria criteria = s.createCriteria( Book.class ).setFetchMode( \"authors\", FetchMode.JOIN );\n"
+ "s.createFullTextQuery( luceneQuery ).setCriteriaQuery( criteria );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:217
+#, no-c-format
+msgid "In this example, the query will return all Books matching the luceneQuery. The authors collection will be loaded from the same query using an SQL outer join."
+msgstr ""
+
+#. Tag: para
+#: query.xml:221
+#, no-c-format
+msgid "When defining a criteria query, it is not needed to restrict the entity types returned while creating the Hibernate Search query from the full text session: the type is guessed from the criteria query itself. Only fetch mode can be adjusted, refrain from applying any other restriction."
+msgstr ""
+
+#. Tag: para
+#: query.xml:227
+#, no-c-format
+msgid "One cannot use <methodname>setCriteriaQuery</methodname> if more than one entity type is expected to be returned."
+msgstr ""
+
+#. Tag: title
+#: query.xml:232
+#, no-c-format
+msgid "Projection"
+msgstr ""
+
+#. Tag: para
+#: query.xml:234
+#, no-c-format
+msgid "For some use cases, returning the domain object (graph) is overkill. Only a small subset of the properties is necessary. Hibernate Search allows you to return a subset of properties:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:239
+#, no-c-format
+msgid "Using projection instead of returning the full domain object"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:242
+#, no-c-format
+msgid ""
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );\n"
+ "query.<emphasis role=\"bold\">setProjection( \"id\", \"summary\", \"body\", \"mainAuthor.name\" )</emphasis>;\n"
+ "List results = query.list();\n"
+ "Object[] firstResult = (Object[]) results.get(0);\n"
+ "Integer id = firstResult[0];\n"
+ "String summary = firstResult[1];\n"
+ "String body = firstResult[2];\n"
+ "String authorName = firstResult[3];"
+msgstr ""
+
+#. Tag: para
+#: query.xml:245
+#, no-c-format
+msgid "Hibernate Search extracts the properties from the Lucene index and convert them back to their object representation, returning a list of <classname>Object[]</classname>. Projections avoid a potential database round trip (useful if the query response time is critical), but has some constraints:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:253
+#, no-c-format
+msgid "the properties projected must be stored in the index (<literal>@Field(store=Store.YES)</literal>), which increase the index size"
+msgstr ""
+
+#. Tag: para
+#: query.xml:259
+#, no-c-format
+msgid "the properties projected must use a <literal>FieldBridge</literal> implementing <classname>org.hibernate.search.bridge.TwoWayFieldBridge</classname> or <literal>org.hibernate.search.bridge.TwoWayStringBridge</literal>, the latter being the simpler version. All Hibernate Search built-in types are two-way."
+msgstr ""
+
+#. Tag: para
+#: query.xml:269
+#, no-c-format
+msgid "you can only project simple properties of the indexed entity or its embedded associations. This means you cannot project a whole embedded entity."
+msgstr ""
+
+#. Tag: para
+#: query.xml:275
+#, no-c-format
+msgid "projection does not work on collections or maps which are indexed via <classname>@IndexedEmbedded</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:280
+#, no-c-format
+msgid "Projection is useful for another kind of use cases. Lucene provides some metadata information to the user about the results. By using some special placeholders, the projection mechanism can retrieve them:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:286
+#, no-c-format
+msgid "Using projection in order to retrieve meta data"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:288
+#, no-c-format
+msgid ""
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );\n"
+ "query.<emphasis role=\"bold\">setProjection( FullTextQuery.SCORE, FullTextQuery.THIS, \"mainAuthor.name\" )</emphasis>;\n"
+ "List results = query.list();\n"
+ "Object[] firstResult = (Object[]) results.get(0);\n"
+ "float score = firstResult[0];\n"
+ "Book book = firstResult[1];\n"
+ "String authorName = firstResult[2];"
+msgstr ""
+
+#. Tag: para
+#: query.xml:291
+#, no-c-format
+msgid "You can mix and match regular fields and special placeholders. Here is the list of available placeholders:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:296
+#, no-c-format
+msgid "FullTextQuery.THIS: returns the initialized and managed entity (as a non projected query would have done)."
+msgstr ""
+
+#. Tag: para
+#: query.xml:301
+#, no-c-format
+msgid "FullTextQuery.DOCUMENT: returns the Lucene Document related to the object projected."
+msgstr ""
+
+#. Tag: para
+#: query.xml:306
+#, no-c-format
+msgid "FullTextQuery.OBJECT_CLASS: returns the class of the indexed entity."
+msgstr ""
+
+#. Tag: para
+#: query.xml:311
+#, no-c-format
+msgid "FullTextQuery.SCORE: returns the document score in the query. Scores are handy to compare one result against an other for a given query but are useless when comparing the result of different queries."
+msgstr ""
+
+#. Tag: para
+#: query.xml:318
+#, no-c-format
+msgid "FullTextQuery.ID: the id property value of the projected object."
+msgstr ""
+
+#. Tag: para
+#: query.xml:323
+#, no-c-format
+msgid "FullTextQuery.DOCUMENT_ID: the Lucene document id. Careful, Lucene document id can change overtime between two different IndexReader opening (this feature is experimental)."
+msgstr ""
+
+#. Tag: para
+#: query.xml:329
+#, no-c-format
+msgid "FullTextQuery.EXPLANATION: returns the Lucene Explanation object for the matching object/document in the given query. Do not use if you retrieve a lot of data. Running explanation typically is as costly as running the whole Lucene query per matching element. Make sure you use projection!"
+msgstr ""
+
+#. Tag: title
+#: query.xml:341
+#, no-c-format
+msgid "Retrieving the results"
+msgstr ""
+
+#. Tag: para
+#: query.xml:343
+#, no-c-format
+msgid "Once the Hibernate Search query is built, executing it is in no way different than executing a HQL or Criteria query. The same paradigm and object semantic applies. All the common operations are available: <methodname>list()</methodname>, <methodname>uniqueResult()</methodname>, <methodname>iterate()</methodname>, <methodname>scroll()</methodname>."
+msgstr ""
+
+#. Tag: title
+#: query.xml:351
+#, no-c-format
+msgid "Performance considerations"
+msgstr ""
+
+#. Tag: para
+#: query.xml:353
+#, no-c-format
+msgid "If you expect a reasonable number of results (for example using pagination) and expect to work on all of them, <methodname>list()</methodname> or <methodname>uniqueResult()</methodname> are recommended. <methodname>list()</methodname> work best if the entity <literal>batch-size</literal> is set up properly. Note that Hibernate Search has to process all Lucene Hits elements (within the pagination) when using <methodname>list()</methodname> , <methodname>uniqueResult()</methodname> and <methodname>iterate()</methodname>."
+msgstr ""
+
+#. Tag: para
+#: query.xml:364
+#, no-c-format
+msgid "If you wish to minimize Lucene document loading, <methodname>scroll()</methodname> is more appropriate. Don't forget to close the <classname>ScrollableResults</classname> object when you're done, since it keeps Lucene resources. If you expect to use <methodname>scroll,</methodname> but wish to load objects in batch, you can use <methodname>query.setFetchSize()</methodname>. When an object is accessed, and if not already loaded, Hibernate Search will load the next <literal>fetchSize</literal> objects in one pass."
+msgstr ""
+
+#. Tag: para
+#: query.xml:373
+#, no-c-format
+msgid "Pagination is a preferred method over scrolling though."
+msgstr ""
+
+#. Tag: title
+#: query.xml:377
+#, no-c-format
+msgid "Result size"
+msgstr ""
+
+#. Tag: para
+#: query.xml:379
+#, no-c-format
+msgid "It is sometime useful to know the total number of matching documents:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:384
+#, no-c-format
+msgid "for the Google-like feature 1-10 of about 888,000,000"
+msgstr ""
+
+#. Tag: para
+#: query.xml:388
+#, no-c-format
+msgid "to implement a fast pagination navigation"
+msgstr ""
+
+#. Tag: para
+#: query.xml:392
+#, no-c-format
+msgid "to implement a multi step search engine (adding approximation if the restricted query return no or not enough results)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:397
+#, no-c-format
+msgid "Of course it would be too costly to retrieve all the matching documents. Hibernate Search allows you to retrieve the total number of matching documents regardless of the pagination parameters. Even more interesting, you can retrieve the number of matching elements without triggering a single object load."
+msgstr ""
+
+#. Tag: title
+#: query.xml:404
+#, no-c-format
+msgid "Determining the result size of a query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:406
+#, no-c-format
+msgid ""
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );\n"
+ "assert 3245 == <emphasis role=\"bold\">query.getResultSize()</emphasis>; //return the number of matching books without loading a single one\n"
+ "\n"
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );\n"
+ "query.setMaxResult(10);\n"
+ "List results = query.list();\n"
+ "assert 3245 == <emphasis role=\"bold\">query.getResultSize()</emphasis>; //return the total number of matching books regardless of pagination"
+msgstr ""
+
+#. Tag: para
+#: query.xml:410
+#, no-c-format
+msgid "Like Google, the number of results is approximative if the index is not fully up-to-date with the database (asynchronous cluster for example)."
+msgstr ""
+
+#. Tag: title
+#: query.xml:417
+#, no-c-format
+msgid "ResultTransformer"
+msgstr ""
+
+#. Tag: para
+#: query.xml:419
+#, no-c-format
+msgid "Especially when using projection, the data structure returned by a query (an object array in this case), is not always matching the application needs. It is possible to apply a <classname>ResultTransformer</classname> operation post query to match the targeted data structure:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:426
+#, no-c-format
+msgid "Using ResultTransformer in conjunction with projections"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:428
+#, no-c-format
+msgid ""
+ "org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );\n"
+ "query.setProjection( \"title\", \"mainAuthor.name\" );\n"
+ "\n"
+ "<emphasis role=\"bold\">query.setResultTransformer( \n"
+ " new StaticAliasToBeanResultTransformer( BookView.class, \"title\", \"author\" ) \n"
+ ");</emphasis>\n"
+ "List<BookView> results = (List<BookView>) query.list();\n"
+ "for(BookView view : results) {\n"
+ " log.info( \"Book: \" + view.getTitle() + \", \" + view.getAuthor() );\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:431
+#, no-c-format
+msgid "Examples of <classname>ResultTransformer</classname> implementations can be found in the Hibernate Core codebase."
+msgstr ""
+
+#. Tag: title
+#: query.xml:436
+#, no-c-format
+msgid "Understanding results"
+msgstr ""
+
+#. Tag: para
+#: query.xml:438
+#, no-c-format
+msgid "You will find yourself sometimes puzzled by a result showing up in a query or a result not showing up in a query. Luke is a great tool to understand those mysteries. However, Hibernate Search also gives you access to the Lucene <classname>Explanation</classname> object for a given result (in a given query). This class is considered fairly advanced to Lucene users but can provide a good understanding of the scoring of an object. You have two ways to access the Explanation object for a given result:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:449
+#, no-c-format
+msgid "Use the <methodname>fullTextQuery.explain(int)</methodname> method"
+msgstr ""
+
+#. Tag: para
+#: query.xml:454
+#, no-c-format
+msgid "Use projection"
+msgstr ""
+
+#. Tag: para
+#: query.xml:458
+#, no-c-format
+msgid "The first approach takes a document id as a parameter and return the Explanation object. The document id can be retrieved using projection and the <literal>FullTextQuery.DOCUMENT_ID</literal> constant."
+msgstr ""
+
+#. Tag: para
+#: query.xml:464
+#, no-c-format
+msgid "The Document id has nothing to do with the entity id. Do not mess up these two notions."
+msgstr ""
+
+#. Tag: para
+#: query.xml:468
+#, no-c-format
+msgid "The second approach let's you project the <classname>Explanation</classname> object using the <literal>FullTextQuery.EXPLANATION</literal> constant."
+msgstr ""
+
+#. Tag: title
+#: query.xml:473
+#, no-c-format
+msgid "Retrieving the Lucene Explanation object using projection"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:476
+#, no-c-format
+msgid ""
+ "FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )\n"
+ " .setProjection( FullTextQuery.DOCUMENT_ID, <emphasis role=\"bold\">FullTextQuery.EXPLANATION</emphasis>, FullTextQuery.THIS );\n"
+ "@SuppressWarnings(\"unchecked\") List<Object[]> results = ftQuery.list();\n"
+ "for (Object[] result : results) {\n"
+ " Explanation e = (Explanation) result[1];\n"
+ " display( e.toString() );\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:479
+#, no-c-format
+msgid "Be careful, building the explanation object is quite expensive, it is roughly as expensive as running the Lucene query again. Don't do it if you don't need the object"
+msgstr ""
+
+#. Tag: title
+#: query.xml:486
+#, no-c-format
+msgid "Filters"
+msgstr ""
+
+#. Tag: para
+#: query.xml:488
+#, no-c-format
+msgid "Apache Lucene has a powerful feature that allows to filter query results according to a custom filtering process. This is a very powerful way to apply additional data restrictions, especially since filters can be cached and reused. Some interesting use cases are:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:495
+#, no-c-format
+msgid "security"
+msgstr ""
+
+#. Tag: para
+#: query.xml:499
+#, no-c-format
+msgid "temporal data (eg. view only last month's data)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:503
+#, no-c-format
+msgid "population filter (eg. search limited to a given category)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:508
+#, no-c-format
+msgid "and many more"
+msgstr ""
+
+#. Tag: para
+#: query.xml:512
+#, no-c-format
+msgid "Hibernate Search pushes the concept further by introducing the notion of parameterizable named filters which are transparently cached. For people familiar with the notion of Hibernate Core filters, the API is very similar:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:518
+#, no-c-format
+msgid "Enabling fulltext filters for a given query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:520
+#, no-c-format
+msgid ""
+ "fullTextQuery = s.createFullTextQuery( query, Driver.class );\n"
+ "fullTextQuery.enableFullTextFilter(\"bestDriver\");\n"
+ "fullTextQuery.enableFullTextFilter(\"security\").setParameter( \"login\", \"andre\" );\n"
+ "fullTextQuery.list(); //returns only best drivers where andre has credentials"
+msgstr ""
+
+#. Tag: para
+#: query.xml:523
+#, no-c-format
+msgid "In this example we enabled two filters on top of the query. You can enable (or disable) as many filters as you like."
+msgstr ""
+
+#. Tag: para
+#: query.xml:526
+#, no-c-format
+msgid "Declaring filters is done through the <classname>@FullTextFilterDef</classname> annotation. This annotation can be on any <literal>@Indexed</literal> entity regardless of the query the filter is later applied to. This implies that filter definitions are global and their names must be unique. A <classname>SearchException</classname> is thrown in case two different <classname>@FullTextFilterDef</classname> annotations with the same name are defined. Each named filter has to specify its actual filter implementation."
+msgstr ""
+
+#. Tag: title
+#: query.xml:537
+#, no-c-format
+msgid "Defining and implementing a Filter"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:539
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "@FullTextFilterDefs( {\n"
+ " <emphasis role=\"bold\">@FullTextFilterDef(name = \"bestDriver\", impl = BestDriversFilter.class)</emphasis>, \n"
+ " <emphasis role=\"bold\">@FullTextFilterDef(name = \"security\", impl = SecurityFilterFactory.class)</emphasis> \n"
+ "})\n"
+ "public class Driver { ... }"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:541
+#, no-c-format
+msgid ""
+ "public class BestDriversFilter extends <emphasis\n"
+ " role=\"bold\">org.apache.lucene.search.Filter</emphasis> {\n"
+ "\n"
+ " public DocIdSet getDocIdSet(IndexReader reader) throws IOException {\n"
+ " OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );\n"
+ " TermDocs termDocs = reader.termDocs( new Term( \"score\", \"5\" ) );\n"
+ " while ( termDocs.next() ) {\n"
+ " bitSet.set( termDocs.doc() );\n"
+ " }\n"
+ " return bitSet;\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:544
+#, no-c-format
+msgid "<classname>BestDriversFilter</classname> is an example of a simple Lucene filter which reduces the result set to drivers whose score is 5. In this example the specified filter implements the <literal>org.apache.lucene.search.Filter</literal> directly and contains a no-arg constructor."
+msgstr ""
+
+#. Tag: para
+#: query.xml:550
+#, no-c-format
+msgid "If your Filter creation requires additional steps or if the filter you want to use does not have a no-arg constructor, you can use the factory pattern:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:555
+#, no-c-format
+msgid "Creating a filter using the factory pattern"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:557
+#, no-c-format
+msgid ""
+ "@Entity\n"
+ "@Indexed\n"
+ "@FullTextFilterDef(name = \"bestDriver\", impl = BestDriversFilterFactory.class)\n"
+ "public class Driver { ... }\n"
+ "\n"
+ "public class BestDriversFilterFactory {\n"
+ "\n"
+ " <emphasis role=\"bold\">@Factory</emphasis>\n"
+ " public Filter getFilter() {\n"
+ " //some additional steps to cache the filter results per IndexReader\n"
+ " Filter bestDriversFilter = new BestDriversFilter();\n"
+ " return new CachingWrapperFilter(bestDriversFilter);\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:560
+#, no-c-format
+msgid "Hibernate Search will look for a <literal>@Factory</literal> annotated method and use it to build the filter instance. The factory must have a no-arg constructor. For people familiar with JBoss Seam, this is similar to the component factory pattern, but the annotation is different!"
+msgstr ""
+
+#. Tag: para
+#: query.xml:566
+#, no-c-format
+msgid "Named filters come in handy where parameters have to be passed to the filter. For example a security filter might want to know which security level you want to apply:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:571
+#, no-c-format
+msgid "Passing parameters to a defined filter"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:573
+#, no-c-format
+msgid ""
+ "fullTextQuery = s.createFullTextQuery( query, Driver.class );\n"
+ "fullTextQuery.enableFullTextFilter(\"security\")<emphasis role=\"bold\">.setParameter( \"level\", 5 )</emphasis>;"
+msgstr ""
+
+#. Tag: para
+#: query.xml:576
+#, no-c-format
+msgid "Each parameter name should have an associated setter on either the filter or filter factory of the targeted named filter definition."
+msgstr ""
+
+#. Tag: title
+#: query.xml:580
+#, no-c-format
+msgid "Using parameters in the actual filter implementation"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:582
+#, no-c-format
+msgid ""
+ "public class SecurityFilterFactory {\n"
+ " private Integer level;\n"
+ "\n"
+ " /**\n"
+ " * injected parameter\n"
+ " */\n"
+ " <emphasis role=\"bold\">public void setLevel(Integer level)</emphasis> {\n"
+ " this.level = level;\n"
+ " }\n"
+ "\n"
+ " <emphasis role=\"bold\">@Key\n"
+ " public FilterKey getKey()</emphasis> {\n"
+ " StandardFilterKey key = new StandardFilterKey();\n"
+ " key.addParameter( level );\n"
+ " return key;\n"
+ " }\n"
+ "\n"
+ " @Factory\n"
+ " public Filter getFilter() {\n"
+ " Query query = new TermQuery( new Term(\"level\", level.toString() ) );\n"
+ " return new CachingWrapperFilter( new QueryWrapperFilter(query) );\n"
+ " }\n"
+ "}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:585
+#, no-c-format
+msgid "Note the method annotated <classname>@Key</classname> returning a <classname>FilterKey</classname> object. The returned object has a special contract: the key object must implement <methodname>equals()</methodname> / <methodname>hashCode()</methodname> so that 2 keys are equal if and only if the given <classname>Filter</classname> types are the same and the set of parameters are the same. In other words, 2 filter keys are equal if and only if the filters from which the keys are generated can be interchanged. The key object is used as a key in the cache mechanism."
+msgstr ""
+
+#. Tag: para
+#: query.xml:594
+#, no-c-format
+msgid "<classname>@Key</classname> methods are needed only if:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:598
+#, no-c-format
+msgid "you enabled the filter caching system (enabled by default)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:603
+#, no-c-format
+msgid "your filter has parameters"
+msgstr ""
+
+#. Tag: para
+#: query.xml:607
+#, no-c-format
+msgid "In most cases, using the <literal>StandardFilterKey</literal> implementation will be good enough. It delegates the <methodname>equals()</methodname> / <methodname>hashCode()</methodname> implementation to each of the parameters equals and hashcode methods."
+msgstr ""
+
+#. Tag: para
+#: query.xml:613
+#, no-c-format
+msgid "As mentioned before the defined filters are per default cached and the cache uses a combination of hard and soft references to allow disposal of memory when needed. The hard reference cache keeps track of the most recently used filters and transforms the ones least used to <classname>SoftReferences</classname> when needed. Once the limit of the hard reference cache is reached additional filters are cached as <classname>SoftReferences</classname>. To adjust the size of the hard reference cache, use <literal>hibernate.search.filter.cache_strategy.size</literal> (defaults to 128). For advanced use of filter caching, you can implement your own <classname>FilterCachingStrategy</classname>. The classname is defined by <literal>hibernate.search.filter.cache_strategy</literal>."
+msgstr ""
+
+#. Tag: para
+#: query.xml:626
+#, no-c-format
+msgid "This filter caching mechanism should not be confused with caching the actual filter results. In Lucene it is common practice to wrap filters using the <classname>IndexReader</classname> around a <classname>CachingWrapperFilter.</classname> The wrapper will cache the <classname>DocIdSet</classname> returned from the <methodname>getDocIdSet(IndexReader reader)</methodname> method to avoid expensive recomputation. It is important to mention that the computed <classname>DocIdSet</classname> is only cachable for the same <classname>IndexReader</classname> instance, because the reader effectively represents the state of the index at the moment it was opened. The document list cannot change within an opened <classname>IndexReader</classname>. A different/new<classname> IndexReader</classname> instance, however, works potentially on a different set of <classname>Document</classname>s (either from a different index or simply because the index has changed), hence the cached <c!
lassname>DocIdSet</classname> has to be recomputed."
+msgstr ""
+
+#. Tag: para
+#: query.xml:643
+#, no-c-format
+msgid "Hibernate Search also helps with this aspect of caching. Per default the <literal>cache</literal> flag of <classname>@FullTextFilterDef </classname>is set to <literal>FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS</literal> which will automatically cache the filter instance as well as wrap the specified filter around a Hibernate specific implementation of <classname>CachingWrapperFilter</classname> (<classname>org.hibernate.search.filter.CachingWrapperFilter</classname>). In contrast to Lucene's version of this class <classname>SoftReference</classname>s are used together with a hard reference count (see discussion about filter cache). The hard reference count can be adjusted using <literal>hibernate.search.filter.cache_docidresults.size</literal> (defaults to 5). The wrapping behaviour can be controlled using the <literal>@FullTextFilterDef.cache</literal> parameter. There are three different values for this parameter:"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:664
+#, no-c-format
+msgid "Value"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:666
+#, no-c-format
+msgid "Definition"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:672
+#, no-c-format
+msgid "FilterCacheModeType.NONE"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:674
+#, no-c-format
+msgid "No filter instance and no result is cached by Hibernate Search. For every filter call, a new filter instance is created. This setting might be useful for rapidly changing data sets or heavily memory constrained environments."
+msgstr ""
+
+#. Tag: entry
+#: query.xml:681
+#, no-c-format
+msgid "FilterCacheModeType.INSTANCE_ONLY"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:683
+#, no-c-format
+msgid "The filter instance is cached and reused across concurrent <methodname>Filter.getDocIdSet()</methodname> calls. <classname>DocIdSet</classname> results are not cached. This setting is useful when a filter uses its own specific caching mechanism or the filter results change dynamically due to application specific events making <classname>DocIdSet</classname> caching in both cases unnecessary."
+msgstr ""
+
+#. Tag: entry
+#: query.xml:695
+#, no-c-format
+msgid "FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:697
+#, no-c-format
+msgid "Both the filter instance and the <classname>DocIdSet</classname> results are cached. This is the default value."
+msgstr ""
+
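For instance, switching off DocIdSet caching for the bestDriver filter from the earlier examples might look like this sketch (reusing the names defined above):

    @FullTextFilterDef(name = "bestDriver",
                       impl = BestDriversFilterFactory.class,
                       cache = FilterCacheModeType.INSTANCE_ONLY)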
+#. Tag: para
+#: query.xml:702
+#, no-c-format
+msgid "Last but not least - why should filters be cached? There are two areas where filter caching shines:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:708
+#, no-c-format
+msgid "the system does not update the targeted entity index often (in other words, the IndexReader is reused a lot)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:713
+#, no-c-format
+msgid "the Filter's DocIdSet is expensive to compute (compared to the time spent to execute the query)"
+msgstr ""
+
+#. Tag: title
+#: query.xml:720
+#, no-c-format
+msgid "Optimizing the query process"
+msgstr ""
+
+#. Tag: para
+#: query.xml:722
+#, no-c-format
+msgid "Query performance depends on several criteria:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:726
+#, no-c-format
+msgid "the Lucene query itself: read the literature on this subject"
+msgstr ""
+
+#. Tag: para
+#: query.xml:731
+#, no-c-format
+msgid "the number of object loaded: use pagination (always ;-) ) or index projection (if needed)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:736
+#, no-c-format
+msgid "the way Hibernate Search interacts with the Lucene readers: defines the appropriate <xref linkend=\"search-architecture-readerstrategy\"/>."
+msgstr ""
+
+#. Tag: title
+#: query.xml:744
+#, no-c-format
+msgid "Native Lucene Queries"
+msgstr ""
+
+#. Tag: para
+#: query.xml:746
+#, no-c-format
+msgid "If you wish to use some specific features of Lucene, you can always run Lucene specific queries. Check <xref linkend=\"search-lucene-native\"/> for more information."
+msgstr ""
+
Property changes on: search/trunk/hibernate-search/src/main/docbook/pot/modules/query.pot
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/master.po
===================================================================
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/master.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/architecture.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/architecture.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/architecture.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,373 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: architecture.xml:30
+#, no-c-format
+msgid "Architecture"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:33
+#, no-c-format
+msgid "Overview"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:35
+#, no-c-format
+msgid ""
+"Hibernate Search consists of an indexing component and an index search "
+"component. Both are backed by Apache Lucene."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:38
+#, no-c-format
+msgid ""
+"Each time an entity is inserted, updated or removed in/from the database, "
+"Hibernate Search keeps track of this event (through the Hibernate event "
+"system) and schedules an index update. All the index updates are handled "
+"without you having to use the Apache Lucene APIs (see <xref linkend=\"search-"
+"configuration-event\"/>)."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:44
+#, no-c-format
+msgid ""
+"To interact with Apache Lucene indexes, Hibernate Search has the notion of "
+"<classname>DirectoryProvider</classname>s. A directory provider will manage "
+"a given Lucene <classname>Directory</classname> type. You can configure "
+"directory providers to adjust the directory target (see <xref linkend="
+"\"search-configuration-directory\"/>)."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:50
+#, no-c-format
+msgid ""
+"Hibernate Search uses the Lucene index to search an entity and return a list "
+"of managed entities saving you the tedious object to Lucene document "
+"mapping. The same persistence context is shared between Hibernate and "
+"Hibernate Search. As a matter of fact, the <classname>FullTextSession</"
+"classname> is built on top of the Hibernate Session. so that the application "
+"code can use the unified <classname>org.hibernate.Query</classname> or "
+"<classname>javax.persistence.Query</classname> APIs exactly the way a HQL, "
+"JPA-QL or native queries would do."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:60
+#, no-c-format
+msgid ""
+"To be more efficient, Hibernate Search batches the write interactions with "
+"the Lucene index. There is currently two types of batching depending on the "
+"expected scope. Outside a transaction, the index update operation is "
+"executed right after the actual database operation. This scope is really a "
+"no scoping setup and no batching is performed. However, it is recommended - "
+"for both your database and Hibernate Search - to execute your operation in a "
+"transaction be it JDBC or JTA. When in a transaction, the index update "
+"operation is scheduled for the transaction commit phase and discarded in "
+"case of transaction rollback. The batching scope is the transaction. There "
+"are two immediate benefits:"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:73
+#, no-c-format
+msgid ""
+"Performance: Lucene indexing works better when operation are executed in "
+"batch."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:78
+#, no-c-format
+msgid ""
+"ACIDity: The work executed has the same scoping as the one executed by the "
+"database transaction and is executed if and only if the transaction is "
+"committed. This is not ACID in the strict sense of it, but ACID behavior is "
+"rarely useful for full text search indexes since they can be rebuilt from "
+"the source at any time."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:86
+#, no-c-format
+msgid ""
+"You can think of those two scopes (no scope vs transactional) as the "
+"equivalent of the (infamous) autocommit vs transactional behavior. From a "
+"performance perspective, the <emphasis>in transaction</emphasis> mode is "
+"recommended. The scoping choice is made transparently. Hibernate Search "
+"detects the presence of a transaction and adjust the scoping."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:92
+#, no-c-format
+msgid ""
+"Hibernate Search works perfectly fine in the Hibernate / EntityManager long "
+"conversation pattern aka. atomic conversation."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:96
+#, no-c-format
+msgid ""
+"Depending on user demand, additional scoping will be considered, the "
+"pluggability mechanism being already in place."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:102
+#, no-c-format
+msgid "Back end"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:104
+#, no-c-format
+msgid ""
+"Hibernate Search offers the ability to let the scoped work being processed "
+"by different back ends. Two back ends are provided out of the box for two "
+"different scenarios."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:109
+#, no-c-format
+msgid "Back end types"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:112
+#, no-c-format
+msgid "Lucene"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:114
+#, no-c-format
+msgid ""
+"In this mode, all index update operations applied on a given node (JVM) will "
+"be executed to the Lucene directories (through the directory providers) by "
+"the same node. This mode is typically used in non clustered environment or "
+"in clustered environments where the directory store is shared."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:132
+#, no-c-format
+msgid "Lucene back end configuration."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:136
+#, no-c-format
+msgid ""
+"This mode targets non clustered applications, or clustered applications "
+"where the Directory is taking care of the locking strategy."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:140
+#, no-c-format
+msgid ""
+"The main advantage is simplicity and immediate visibility of the changes in "
+"Lucene queries (a requirement in some applications)."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:145
+#, no-c-format
+msgid "<title>JMS</title>"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:147
+#, no-c-format
+msgid ""
+"All index update operations applied on a given node are sent to a JMS queue. "
+"A unique reader will then process the queue and update the master index. The "
+"master index is then replicated on a regular basis to the slave copies. This "
+"is known as the master/slaves pattern. The master is the sole responsible "
+"for updating the Lucene index. The slaves can accept read as well as write "
+"operations. However, they only process the read operation on their local "
+"index copy and delegate the update operations to the master."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:167
+#, no-c-format
+msgid "JMS back end configuration."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:171
+#, no-c-format
+msgid ""
+"This mode targets clustered environments where throughput is critical, and "
+"index update delays are affordable. Reliability is ensured by the JMS "
+"provider and by having the slaves working on a local copy of the index."
+msgstr ""
+
+#. Tag: note
+#: architecture.xml:177
+#, no-c-format
+msgid ""
+"Hibernate Search is an extensible architecture. Feel free to drop ideas for "
+"other third party back ends to <literal>hibernate-dev(a)lists.jboss.org</"
+"literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:183
+#, no-c-format
+msgid "Work execution"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:185
+#, no-c-format
+msgid ""
+"The indexing work (done by the back end) can be executed synchronously with "
+"the transaction commit (or update operation if out of transaction), or "
+"asynchronously."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:190
+#, no-c-format
+msgid "Synchronous"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:192
+#, no-c-format
+msgid ""
+"This is the safe mode where the back end work is executed in concert with "
+"the transaction commit. Under highly concurrent environment, this can lead "
+"to throughput limitations (due to the Apache Lucene lock mechanism) and it "
+"can increase the system response time if the backend is significantly slower "
+"than the transactional process and if a lot of IO operations are involved."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:201
+#, no-c-format
+msgid "Asynchronous"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:203
+#, no-c-format
+msgid ""
+"This mode delegates the work done by the back end to a different thread. "
+"That way, throughput and response time are (to a certain extend) "
+"decorrelated from the back end performance. The drawback is that a small "
+"delay appears between the transaction commit and the index update and a "
+"small overhead is introduced to deal with thread management."
+msgstr ""
+
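+# A configuration sketch only: the worker properties below are the ones documented in
+# configuration.po later in this commit; the values are illustrative.
+#
+#   hibernate.search.worker.execution = async
+#   hibernate.search.worker.thread_pool.size = 2
+#   hibernate.search.worker.buffer_queue.max = 50
+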
+#. Tag: para
+#: architecture.xml:210
+#, no-c-format
+msgid ""
+"It is recommended to use synchronous execution first and evaluate "
+"asynchronous execution if performance problems occur and after having set up "
+"a proper benchmark (ie not a lonely cowboy hitting the system in a "
+"completely unrealistic way)."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:219
+#, no-c-format
+msgid "Reader strategy"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:221
+#, no-c-format
+msgid ""
+"When executing a query, Hibernate Search interacts with the Apache Lucene "
+"indexes through a reader strategy. Choosing a reader strategy will depend on "
+"the profile of the application (frequent updates, read mostly, asynchronous "
+"index update etc). See also"
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:228
+#, no-c-format
+msgid "Shared"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:230
+#, no-c-format
+msgid ""
+"With this strategy, Hibernate Search will share the same "
+"<classname>IndexReader</classname>, for a given Lucene index, across "
+"multiple queries and threads provided that the <classname>IndexReader</"
+"classname> is still up-to-date. If the <classname>IndexReader</classname> is "
+"not up-to-date, a new one is opened and provided. Each "
+"<classname>IndexReader</classname> is made of several "
+"<classname>SegmentReader</classname>s. This strategy only reopens segments "
+"that have been modified or created after last opening and shares the already "
+"loaded segments from the previous instance. This strategy is the default."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:241
+#, no-c-format
+msgid "The name of this strategy is <literal>shared</literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:245
+#, no-c-format
+msgid "Not-shared"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:247
+#, no-c-format
+msgid ""
+"Every time a query is executed, a Lucene <classname>IndexReader</classname> "
+"is opened. This strategy is not the most efficient since opening and warming "
+"up an <classname>IndexReader</classname> can be a relatively expensive "
+"operation."
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:253
+#, no-c-format
+msgid "The name of this strategy is <literal>not-shared</literal>."
+msgstr ""
+
+#. Tag: title
+#: architecture.xml:257
+#, no-c-format
+msgid "Custom"
+msgstr ""
+
+#. Tag: para
+#: architecture.xml:259
+#, no-c-format
+msgid ""
+"You can write your own reader strategy that suits your application needs by "
+"implementing <classname>org.hibernate.search.reader.ReaderProvider</"
+"classname>. The implementation must be thread safe."
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/architecture.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/batchindex.po
===================================================================
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/batchindex.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/configuration.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/configuration.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/configuration.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1605 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: configuration.xml:30
+#, no-c-format
+msgid "Configuration"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:33
+#, no-c-format
+msgid "Directory configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:35
+#, no-c-format
+msgid ""
+"Apache Lucene has a notion of <literal>Directory</literal> to store the "
+"index files. The <classname>Directory</classname> implementation can be "
+"customized, but Lucene comes bundled with a file system "
+"(<literal>FSDirectoryProvider</literal>) and an in memory "
+"(<literal>RAMDirectoryProvider</literal>) implementation. "
+"<literal>DirectoryProvider</literal>s are the Hibernate Search abstraction "
+"around a Lucene <classname>Directory</classname> and handle the "
+"configuration and the initialization of the underlying Lucene resources. "
+"<xref linkend=\"directory-provider-table\"/> shows the list of the directory "
+"providers bundled with Hibernate Search."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:47
+#, no-c-format
+msgid "List of built-in Directory Providers"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:52 configuration.xml:811
+#, no-c-format
+msgid "Class"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:54 configuration.xml:349 configuration.xml:635
+#: configuration.xml:813
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:56
+#, no-c-format
+msgid "Properties"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:62
+#, no-c-format
+msgid "org.hibernate.search.store.RAMDirectoryProvider"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:64
+#, no-c-format
+msgid ""
+"Memory based directory, the directory will be uniquely identified (in the "
+"same deployment unit) by the <literal>@Indexed.index</literal> element"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:68 configuration.xml:868
+#, no-c-format
+msgid "none"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:72
+#, no-c-format
+msgid "org.hibernate.search.store.FSDirectoryProvider"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:74
+#, no-c-format
+msgid ""
+"File system based directory. The directory used will be <indexBase>/"
+"< indexName >"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:77
+#, no-c-format
+msgid "<literal>indexBase</literal> : Base directory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:78 configuration.xml:102 configuration.xml:136
+#, no-c-format
+msgid ""
+"<literal>indexName</literal>: override @Indexed.index (useful for sharded "
+"indexes)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:79 configuration.xml:113 configuration.xml:147
+#, no-c-format
+msgid "<literal> locking_strategy</literal> : optional, see"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:86
+#, no-c-format
+msgid "org.hibernate.search.store.FSMasterDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:88
+#, no-c-format
+msgid ""
+"File system based directory. Like FSDirectoryProvider. It also copies the "
+"index to a source directory (aka copy directory) on a regular basis."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:91 configuration.xml:125
+#, no-c-format
+msgid ""
+"The recommended value for the refresh period is (at least) 50% higher that "
+"the time to copy the information (default 3600 seconds - 60 minutes)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:93 configuration.xml:127
+#, no-c-format
+msgid ""
+"Note that the copy is based on an incremental copy mechanism reducing the "
+"average copy time."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:95
+#, no-c-format
+msgid ""
+"DirectoryProvider typically used on the master node in a JMS back end "
+"cluster."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:96 configuration.xml:130
+#, no-c-format
+msgid ""
+"The <literal> buffer_size_on_copy</literal> optimum depends on your "
+"operating system and available RAM; most people reported good results using "
+"values between 16 and 64MB."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:101 configuration.xml:135
+#, no-c-format
+msgid "<literal>indexBase</literal>: Base directory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:104 configuration.xml:138
+#, no-c-format
+msgid "<literal>sourceBase</literal>: Source (copy) base directory."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:105 configuration.xml:139
+#, no-c-format
+msgid ""
+"<literal>source</literal>: Source directory suffix (default to "
+"<literal>@Indexed.index</literal>). The actual source directory name being "
+"<filename><sourceBase>/<source></filename>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:109 configuration.xml:143
+#, no-c-format
+msgid ""
+"<literal>refresh</literal>: refresh period in second (the copy will take "
+"place every refresh seconds)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:110 configuration.xml:144
+#, no-c-format
+msgid ""
+"<literal>buffer_size_on_copy</literal>: The amount of MegaBytes to move in a "
+"single low level copy instruction; defaults to 16MB."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:120
+#, no-c-format
+msgid "org.hibernate.search.store.FSSlaveDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:122
+#, no-c-format
+msgid ""
+"File system based directory. Like FSDirectoryProvider, but retrieves a "
+"master version (source) on a regular basis. To avoid locking and "
+"inconsistent search results, 2 local copies are kept."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:129
+#, no-c-format
+msgid "DirectoryProvider typically used on slave nodes using a JMS back end."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:156
+#, no-c-format
+msgid ""
+"If the built-in directory providers do not fit your needs, you can write "
+"your own directory provider by implementing the <classname>org.hibernate."
+"store.DirectoryProvider</classname> interface."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:161
+#, no-c-format
+msgid ""
+"Each indexed entity is associated to a Lucene index (an index can be shared "
+"by several entities but this is not usually the case). You can configure the "
+"index through properties prefixed by <constant>hibernate.search.</"
+"constant><replaceable>indexname</replaceable> . Default properties inherited "
+"to all indexes can be defined using the prefix <constant>hibernate.search."
+"default.</constant>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:168
+#, no-c-format
+msgid ""
+"To define the directory provider of a given index, you use the "
+"<constant>hibernate.search.<replaceable>indexname</replaceable>."
+"directory_provider </constant>"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:173
+#, no-c-format
+msgid "Configuring directory providers"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:175
+#, no-c-format
+msgid ""
+"hibernate.search.default.directory_provider org.hibernate.search.store."
+"FSDirectoryProvider\n"
+"hibernate.search.default.indexBase=/usr/lucene/indexes\n"
+"hibernate.search.Rules.directory_provider org.hibernate.search.store."
+"RAMDirectoryProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:178
+#, no-c-format
+msgid "applied on"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:181
+#, no-c-format
+msgid ""
+"Specifying the index name using the <literal>index</literal> parameter of "
+"<classname>@Indexed</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:184
+#, no-c-format
+msgid ""
+"@Indexed(index=\"Status\")\n"
+"public class Status { ... }\n"
+"\n"
+"@Indexed(index=\"Rules\")\n"
+"public class Rule { ... }"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:187
+#, no-c-format
+msgid ""
+"will create a file system directory in <filename>/usr/lucene/indexes/Status</"
+"filename> where the Status entities will be indexed, and use an in memory "
+"directory named <literal>Rules</literal> where Rule entities will be indexed."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:192
+#, no-c-format
+msgid ""
+"You can easily define common rules like the directory provider and base "
+"directory, and override those defaults later on on a per index basis."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:196
+#, no-c-format
+msgid ""
+"Writing your own <classname>DirectoryProvider</classname>, you can utilize "
+"this configuration mechanism as well."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:201
+#, no-c-format
+msgid "Sharding indexes"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:203
+#, no-c-format
+msgid ""
+"In some extreme cases involving huge indexes (in size), it is necessary to "
+"split (shard) the indexing data of a given entity type into several Lucene "
+"indexes. This solution is not recommended until you reach significant index "
+"sizes and index update times are slowing the application down. The main "
+"drawback of index sharding is that searches will end up being slower since "
+"more files have to be opened for a single search. In other words don't do it "
+"until you have problems :)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:211
+#, no-c-format
+msgid ""
+"Despite this strong warning, Hibernate Search allows you to index a given "
+"entity type into several sub indexes. Data is sharded into the different sub "
+"indexes thanks to an <classname>IndexShardingStrategy</classname>. By "
+"default, no sharding strategy is enabled, unless the number of shards is "
+"configured. To configure the number of shards use the following property"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:219
+#, no-c-format
+msgid ""
+"Enabling index sharding by specifying nbr_of_shards for a specific index"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:222
+#, no-c-format
+msgid "hibernate.search.<indexName>.sharding_strategy.nbr_of_shards 5"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:225
+#, no-c-format
+msgid "This will use 5 different shards."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:227
+#, no-c-format
+msgid ""
+"The default sharding strategy, when shards are set up, splits the data "
+"according to the hash value of the id string representation (generated by "
+"the Field Bridge). This ensures a fairly balanced sharding. You can replace "
+"the strategy by implementing <literal>IndexShardingStrategy</literal> and by "
+"setting the following property"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:235
+#, no-c-format
+msgid "Specifying a custom sharding strategy"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:237
+#, no-c-format
+msgid ""
+"hibernate.search.<indexName>.sharding_strategy my.shardingstrategy."
+"Implementation"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:240
+#, no-c-format
+msgid ""
+"Each shard has an independent directory provider configuration as described "
+"in <xref linkend=\"search-configuration-directory\"/>. The DirectoryProvider "
+"default name for the previous example are <literal><indexName>.0</"
+"literal> to <literal><indexName>.4</literal>. In other words, each "
+"shard has the name of it's owning index followed by <constant>.</constant> "
+"(dot) and its index number."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:249
+#, no-c-format
+msgid ""
+"Configuring the sharding configuration for an example entity "
+"<classname>Animal</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:252
+#, no-c-format
+msgid ""
+"hibernate.search.default.indexBase /usr/lucene/indexes\n"
+"\n"
+"hibernate.search.Animal.sharding_strategy.nbr_of_shards 5\n"
+"hibernate.search.Animal.directory_provider org.hibernate.search.store."
+"FSDirectoryProvider\n"
+"hibernate.search.Animal.0.indexName Animal00\n"
+"hibernate.search.Animal.3.indexBase /usr/lucene/sharded\n"
+"hibernate.search.Animal.3.indexName Animal03"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:255
+#, no-c-format
+msgid ""
+"This configuration uses the default id string hashing strategy and shards "
+"the Animal index into 5 subindexes. All subindexes are "
+"<classname>FSDirectoryProvider</classname> instances and the directory where "
+"each subindex is stored is as followed:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:262
+#, no-c-format
+msgid ""
+"for subindex 0: /usr/lucene/indexes/Animal00 (shared indexBase but "
+"overridden indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:267
+#, no-c-format
+msgid ""
+"for subindex 1: /usr/lucene/indexes/Animal.1 (shared indexBase, default "
+"indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:272
+#, no-c-format
+msgid ""
+"for subindex 2: /usr/lucene/indexes/Animal.2 (shared indexBase, default "
+"indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:277
+#, no-c-format
+msgid ""
+"for subindex 3: /usr/lucene/shared/Animal03 (overridden indexBase, "
+"overridden indexName)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:282
+#, no-c-format
+msgid ""
+"for subindex 4: /usr/lucene/indexes/Animal.4 (shared indexBase, default "
+"indexName)"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:289
+#, no-c-format
+msgid "Sharing indexes (two entities into the same directory)"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:292
+#, no-c-format
+msgid ""
+"This is only presented here so that you know the option is available. There "
+"is really not much benefit in sharing indexes."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:296
+#, no-c-format
+msgid ""
+"It is technically possible to store the information of more than one entity "
+"into a single Lucene index. There are two ways to accomplish this:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:302
+#, no-c-format
+msgid ""
+"Configuring the underlying directory providers to point to the same physical "
+"index directory. In practice, you set the property <literal>hibernate.search."
+"[fully qualified entity name].indexName</literal> to the same value. As an "
+"example let’s use the same index (directory) for the <classname>Furniture</"
+"classname> and <classname>Animal</classname> entity. We just set "
+"<literal>indexName</literal> for both entities to for example “Animal”. Both "
+"entities will then be stored in the Animal directory"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:312
+#, no-c-format
+msgid ""
+"<code>hibernate.search.org.hibernate.search.test.shards.Furniture.indexName "
+"= Animal\n"
+"hibernate.search.org.hibernate.search.test.shards.Animal.indexName = Animal</"
+"code>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:316
+#, no-c-format
+msgid ""
+"Setting the <code>@Indexed</code> annotation’s <methodname>index</"
+"methodname> attribute of the entities you want to merge to the same value. "
+"If we again wanted all <classname>Furniture</classname> instances to be "
+"indexed in the <classname>Animal</classname> index along with all instances "
+"of <classname>Animal</classname> we would specify <code>@Indexed"
+"(index=”Animal”)</code> on both <classname>Animal</classname> and "
+"<classname>Furniture</classname> classes."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:330
+#, no-c-format
+msgid "Worker configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:332
+#, no-c-format
+msgid ""
+"It is possible to refine how Hibernate Search interacts with Lucene through "
+"the worker configuration. The work can be executed to the Lucene directory "
+"or sent to a JMS queue for later processing. When processed to the Lucene "
+"directory, the work can be processed synchronously or asynchronously to the "
+"transaction commit."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:338
+#, no-c-format
+msgid "You can define the worker configuration using the following properties"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:342
+#, no-c-format
+msgid "worker configuration"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:347 configuration.xml:633
+#, no-c-format
+msgid "Property"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:353
+#, no-c-format
+msgid "hibernate.search.worker.backend"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:355
+#, no-c-format
+msgid ""
+"Out of the box support for the Apache Lucene back end and the JMS back end. "
+"Default to <literal>lucene</literal>. Supports also <literal>jms</literal> "
+"and <literal>blackhole</literal>."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:361
+#, no-c-format
+msgid "hibernate.search.worker.execution"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:363
+#, no-c-format
+msgid ""
+"Supports synchronous and asynchronous execution. Default to "
+"<literal><literal>sync</literal></literal>. Supports also <literal>async</"
+"literal>."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:369
+#, no-c-format
+msgid "hibernate.search.worker.thread_pool.size"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:371
+#, no-c-format
+msgid ""
+"Defines the number of threads in the pool. useful only for asynchronous "
+"execution. Default to 1."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:376
+#, no-c-format
+msgid "hibernate.search.worker.buffer_queue.max"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:378
+#, no-c-format
+msgid ""
+"Defines the maximal number of work queue if the thread poll is starved. "
+"Useful only for asynchronous execution. Default to infinite. If the limit is "
+"reached, the work is done by the main thread."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:385
+#, no-c-format
+msgid "hibernate.search.worker.jndi.*"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:387
+#, no-c-format
+msgid ""
+"Defines the JNDI properties to initiate the InitialContext (if needed). JNDI "
+"is only used by the JMS back end."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:392
+#, no-c-format
+msgid "hibernate.search.worker.jms.connection_factory"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:395
+#, no-c-format
+msgid ""
+"Mandatory for the JMS back end. Defines the JNDI name to lookup the JMS "
+"connection factory from (<literal>/ConnectionFactory</literal> by default in "
+"JBoss AS)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:402
+#, no-c-format
+msgid "hibernate.search.worker.jms.queue"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:404
+#, no-c-format
+msgid ""
+"Mandatory for the JMS back end. Defines the JNDI name to lookup the JMS "
+"queue from. The queue will be used to post work messages."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:414
+#, no-c-format
+msgid "JMS Master/Slave configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:416
+#, no-c-format
+msgid ""
+"This section describes in greater detail how to configure the Master / "
+"Slaves Hibernate Search architecture."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:429
+#, no-c-format
+msgid "JMS back end configuration."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:433
+#, no-c-format
+msgid "Slave nodes"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:435
+#, no-c-format
+msgid ""
+"Every index update operation is sent to a JMS queue. Index querying "
+"operations are executed on a local index copy."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:439
+#, no-c-format
+msgid "JMS Slave configuration"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:441
+#, no-c-format
+msgid ""
+"### slave configuration\n"
+"\n"
+"## DirectoryProvider\n"
+"# (remote) master location\n"
+"hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/"
+"mastercopy\n"
+"\n"
+"# local copy location\n"
+"hibernate.search.default.indexBase = /Users/prod/lucenedirs\n"
+"\n"
+"# refresh every half hour\n"
+"hibernate.search.default.refresh = 1800\n"
+"\n"
+"# appropriate directory provider\n"
+"hibernate.search.default.directory_provider = org.hibernate.search.store."
+"FSSlaveDirectoryProvider\n"
+"\n"
+"## Backend configuration\n"
+"hibernate.search.worker.backend = jms\n"
+"hibernate.search.worker.jms.connection_factory = /ConnectionFactory\n"
+"hibernate.search.worker.jms.queue = queue/hibernatesearch\n"
+"#optional jndi configuration (check your JMS provider for more information)\n"
+"\n"
+"## Optional asynchronous execution strategy\n"
+"# hibernate.search.worker.execution = async\n"
+"# hibernate.search.worker.thread_pool.size = 2\n"
+"# hibernate.search.worker.buffer_queue.max = 50"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:444
+#, no-c-format
+msgid "A file system local copy is recommended for faster search results."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:447 configuration.xml:463
+#, no-c-format
+msgid "The refresh period should be higher that the expected time copy."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:452
+#, no-c-format
+msgid "Master node"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:454
+#, no-c-format
+msgid ""
+"Every index update operation is taken from a JMS queue and executed. The "
+"master index is copied on a regular basis."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:458
+#, no-c-format
+msgid "JMS Master configuration"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:460
+#, no-c-format
+msgid ""
+"### master configuration\n"
+"\n"
+"## DirectoryProvider\n"
+"# (remote) master location where information is copied to\n"
+"hibernate.search.default.sourceBase = /mnt/mastervolume/lucenedirs/"
+"mastercopy\n"
+"\n"
+"# local master location\n"
+"hibernate.search.default.indexBase = /Users/prod/lucenedirs\n"
+"\n"
+"# refresh every half hour\n"
+"hibernate.search.default.refresh = 1800\n"
+"\n"
+"# appropriate directory provider\n"
+"hibernate.search.default.directory_provider = org.hibernate.search.store."
+"FSMasterDirectoryProvider\n"
+"\n"
+"## Backend configuration\n"
+"#Backend is the default lucene one"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:466
+#, no-c-format
+msgid ""
+"In addition to the Hibernate Search framework configuration, a Message "
+"Driven Bean should be written and set up to process the index works queue "
+"through JMS."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:471
+#, no-c-format
+msgid "Message Driven Bean processing the indexing queue"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:473
+#, no-c-format
+msgid ""
+"@MessageDriven(activationConfig = {\n"
+" @ActivationConfigProperty(propertyName=\"destinationType\", "
+"propertyValue=\"javax.jms.Queue\"),\n"
+" @ActivationConfigProperty(propertyName=\"destination\", propertyValue="
+"\"queue/hibernatesearch\"),\n"
+" @ActivationConfigProperty(propertyName=\"DLQMaxResent\", propertyValue="
+"\"1\")\n"
+" } )\n"
+"public class MDBSearchController extends "
+"AbstractJMSHibernateSearchController implements MessageListener {\n"
+" @PersistenceContext EntityManager em;\n"
+" \n"
+" //method retrieving the appropriate session\n"
+" protected Session getSession() {\n"
+" return (Session) em.getDelegate();\n"
+" }\n"
+"\n"
+" //potentially close the session opened in #getSession(), not needed "
+"here\n"
+" protected void cleanSessionIfNeeded(Session session) \n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:476
+#, no-c-format
+msgid ""
+"This example inherits from the abstract JMS controller class available in "
+"the Hibernate Search source code and implements a JavaEE 5 MDB. This "
+"implementation is given as an example and, while most likely be more "
+"complex, can be adjusted to make use of non Java EE Message Driven Beans. "
+"For more information about the <methodname>getSession()</methodname> and "
+"<methodname>cleanSessionIfNeeded()</methodname>, please check "
+"<classname>AbstractJMSHibernateSearchController</classname>'s javadoc."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:489
+#, no-c-format
+msgid "Reader strategy configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:491
+#, no-c-format
+msgid ""
+"The different reader strategies are described in <xref linkend=\"search-"
+"architecture-readerstrategy\"/>. Out of the box strategies are:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:497
+#, no-c-format
+msgid ""
+"<literal>shared</literal>: share index readers across several queries. This "
+"strategy is the most efficient."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:502
+#, no-c-format
+msgid ""
+"<literal>not-shared</literal>: create an index reader for each individual "
+"query"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:507
+#, no-c-format
+msgid ""
+"The default reader strategy is <literal>shared</literal>. This can be "
+"adjusted:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:510
+#, no-c-format
+msgid "hibernate.search.reader.strategy = not-shared"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:512
+#, no-c-format
+msgid ""
+"Adding this property switches to the <literal>not-shared</literal> strategy."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:515
+#, no-c-format
+msgid "Or if you have a custom reader strategy:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:517
+#, no-c-format
+msgid "hibernate.search.reader.strategy = my.corp.myapp.CustomReaderProvider"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:519
+#, no-c-format
+msgid ""
+"where <classname>my.corp.myapp.CustomReaderProvider</classname> is the "
+"custom strategy implementation."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:524
+#, no-c-format
+msgid "Enabling Hibernate Search and automatic indexing"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:527
+#, no-c-format
+msgid "Enabling Hibernate Search"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:529
+#, no-c-format
+msgid ""
+"Hibernate Search is enabled out of the box when using Hibernate Annotations "
+"or Hibernate EntityManager. If, for some reason you need to disable it, set "
+"<literal>hibernate.search.autoregister_listeners</literal> to false. Note "
+"that there is no performance penalty when the listeners are enabled even "
+"though no entities are indexed."
+msgstr ""
+
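+# As a concrete illustration of the property mentioned above (the property name is
+# taken verbatim from the text; disabling is rarely needed):
+#
+#   hibernate.search.autoregister_listeners = false
+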
+#. Tag: para
+#: configuration.xml:536
+#, no-c-format
+msgid ""
+"To enable Hibernate Search in Hibernate Core (ie. if you don't use Hibernate "
+"Annotations), add the <literal>FullTextIndexEventListener</literal> for the "
+"following six Hibernate events and also add it after the default "
+"<literal>DefaultFlushEventListener</literal>, as in the following example."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:543
+#, no-c-format
+msgid ""
+"Explicitly enabling Hibernate Search by configuring the "
+"<classname>FullTextIndexEventListener</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:546
+#, no-c-format
+msgid ""
+"<hibernate-configuration>\n"
+" <session-factory>\n"
+" ...\n"
+" <event type=\"post-update\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"post-insert\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"post-delete\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"post-collection-recreate\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"post-collection-remove\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"post-collection-update\">\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" <event type=\"flush\">\n"
+" <listener class=\"org.hibernate.event.def."
+"DefaultFlushEventListener\"/>\n"
+" <listener class=\"org.hibernate.search.event."
+"FullTextIndexEventListener\"/>\n"
+" </event>\n"
+" </session-factory>\n"
+"</hibernate-configuration>"
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:551
+#, no-c-format
+msgid "Automatic indexing"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:553
+#, no-c-format
+msgid ""
+"By default, every time an object is inserted, updated or deleted through "
+"Hibernate, Hibernate Search updates the according Lucene index. It is "
+"sometimes desirable to disable that features if either your index is read-"
+"only or if index updates are done in a batch way (see <xref linkend=\"search-"
+"batchindex\"/>)."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:559
+#, no-c-format
+msgid "To disable event based indexing, set"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:561
+#, no-c-format
+msgid "hibernate.search.indexing_strategy manual"
+msgstr ""
+
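+# A minimal sketch of what manual indexing then looks like in application code,
+# assuming the FullTextSession.index() API of Hibernate Search 3.x and a hypothetical
+# Book entity:
+#
+#   FullTextSession fullTextSession = org.hibernate.search.Search.getFullTextSession(session);
+#   Transaction tx = fullTextSession.beginTransaction();
+#   for (Book book : books) {
+#       fullTextSession.index(book);   // adds or updates the Lucene document for this entity
+#   }
+#   tx.commit();                       // index changes are applied at commit time
+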
+#. Tag: para
+#: configuration.xml:564
+#, no-c-format
+msgid ""
+"In most case, the JMS backend provides the best of both world, a lightweight "
+"event based system keeps track of all changes in the system, and the "
+"heavyweight indexing process is done by a separate process or machine."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:573
+#, no-c-format
+msgid "Tuning Lucene indexing performance"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:575
+#, no-c-format
+msgid ""
+"Hibernate Search allows you to tune the Lucene indexing performance by "
+"specifying a set of parameters which are passed through to underlying Lucene "
+"<literal>IndexWriter</literal> such as <literal>mergeFactor</literal>, "
+"<literal>maxMergeDocs</literal> and <literal>maxBufferedDocs</literal>. You "
+"can specify these parameters either as default values applying for all "
+"indexes, on a per index basis, or even per shard."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:583
+#, no-c-format
+msgid ""
+"There are two sets of parameters allowing for different performance settings "
+"depending on the use case. During indexing operations triggered by database "
+"modifications, the parameters are grouped by the <literal>transaction</"
+"literal> keyword: <programlisting>hibernate.search.[default|<"
+"indexname>].indexwriter.transaction.<parameter_name></"
+"programlisting> When indexing occurs via <literal>FullTextSession.index()</"
+"literal> (see <xref linkend=\"search-batchindex\"/>), the used properties "
+"are those grouped under the <literal>batch</literal> keyword:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:589
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.batch.<"
+"parameter_name>"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:591
+#, no-c-format
+msgid ""
+"Unless the corresponding <literal>.batch</literal> property is explicitly "
+"set, the value will default to the <literal>.transaction</literal> property. "
+"If no value is set for a <literal>.batch</literal> value in a specific shard "
+"configuration, Hibernate Search will look at the index section, then at the "
+"default section and after that it will look for a <literal>.transaction</"
+"literal> in the same order: <programlisting>hibernate.search.Animals.2."
+"indexwriter.transaction.max_merge_docs 10\n"
+"hibernate.search.Animals.2.indexwriter.transaction.merge_factor 20\n"
+"hibernate.search.default.indexwriter.batch.max_merge_docs 100</"
+"programlisting> This configuration will result in these settings applied to "
+"the second shard of Animals index:"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:603
+#, no-c-format
+msgid "<literal>transaction.max_merge_docs</literal> = 10"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:607
+#, no-c-format
+msgid "<literal>batch.max_merge_docs</literal> = 100"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:611
+#, no-c-format
+msgid "<literal>transaction.merge_factor</literal> = 20"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:615
+#, no-c-format
+msgid "<literal>batch.merge_factor</literal> = 20"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:619
+#, no-c-format
+msgid "All other values will use the defaults defined in Lucene."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:621
+#, no-c-format
+msgid ""
+"The default for all values is to leave them at Lucene's own default, so the "
+"listed values in the following table actually depend on the version of "
+"Lucene you are using; values shown are relative to version <literal>2.4</"
+"literal>. For more information about Lucene indexing performances, please "
+"refer to the Lucene documentation."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:628
+#, no-c-format
+msgid "List of indexing performance and behavior properties"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:637
+#, no-c-format
+msgid "Default Value"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:643
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"max_buffered_delete_terms"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:645
+#, no-c-format
+msgid ""
+"Determines the minimal number of delete terms required before the buffered "
+"in-memory delete terms are applied and flushed. If there are documents "
+"buffered in memory at the time, they are merged and a new segment is created."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:650 configuration.xml:660
+#, no-c-format
+msgid "Disabled (flushes by RAM usage)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:654
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"max_buffered_docs"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:656
+#, no-c-format
+msgid ""
+"Controls the amount of documents buffered in memory during indexing. The "
+"bigger the more RAM is consumed."
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:664
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"max_field_length"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:666
+#, no-c-format
+msgid ""
+"The maximum number of terms that will be indexed for a single field. This "
+"limits the amount of memory required for indexing so that very large data "
+"will not crash the indexing process by running out of memory. This setting "
+"refers to the number of running terms, not to the number of different terms."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:671
+#, no-c-format
+msgid ""
+"This silently truncates large documents, excluding from the index all terms "
+"that occur further in the document. If you know your source documents are "
+"large, be sure to set this value high enough to accommodate the expected "
+"size. If you set it to Integer.MAX_VALUE, then the only limit is your "
+"memory, but you should anticipate an OutOfMemoryError."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:676
+#, no-c-format
+msgid ""
+"If setting this value in <literal>batch</literal> differently than in "
+"<literal>transaction</literal> you may get different data (and results) in "
+"your index depending on the indexing mode."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:682
+#, no-c-format
+msgid "10000"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:686
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"max_merge_docs"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:688
+#, no-c-format
+msgid ""
+"Defines the largest number of documents allowed in a segment. Larger values "
+"are best for batched indexing and speedier searches. Small values are best "
+"for transaction indexing."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:693
+#, no-c-format
+msgid "Unlimited (Integer.MAX_VALUE)"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:697
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"merge_factor"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:699
+#, no-c-format
+msgid "Controls segment merge frequency and size."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:700
+#, no-c-format
+msgid ""
+"Determines how often segment indexes are merged when insertion occurs. With "
+"smaller values, less RAM is used while indexing, and searches on unoptimized "
+"indexes are faster, but indexing speed is slower. With larger values, more "
+"RAM is used during indexing, and while searches on unoptimized indexes are "
+"slower, indexing is faster. Thus larger values (> 10) are best for batch "
+"index creation, and smaller values (< 10) for indexes that are "
+"interactively maintained. The value must no be lower than 2."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:710
+#, no-c-format
+msgid "<entry>10</entry>"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:714
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"ram_buffer_size"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:716
+#, no-c-format
+msgid ""
+"Controls the amount of RAM in MB dedicated to document buffers. When used "
+"together max_buffered_docs a flush occurs for whichever event happens first."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:718
+#, no-c-format
+msgid ""
+"Generally for faster indexing performance it's best to flush by RAM usage "
+"instead of document count and use as large a RAM buffer as you can."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:723
+#, no-c-format
+msgid "16 MB"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:727
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"term_index_interval"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:729
+#, no-c-format
+msgid "Expert: Set the interval between indexed terms."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:730
+#, no-c-format
+msgid ""
+"Large values cause less memory to be used by IndexReader, but slow random-"
+"access to terms. Small values cause more memory to be used by an "
+"IndexReader, and speed random-access to terms. See Lucene documentation for "
+"more details."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:736
+#, no-c-format
+msgid "<entry>128</entry>"
+msgstr ""
+
+#. Tag: literal
+#: configuration.xml:740
+#, no-c-format
+msgid ""
+"hibernate.search.[default|<indexname>].indexwriter.[transaction|batch]."
+"use_compound_file"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:742
+#, no-c-format
+msgid ""
+"The advantage of using the compound file format is that less file "
+"descriptors are used. The disadvantage is that indexing takes more time and "
+"temporary disk space. You can set this parameter to <literal>false</literal> "
+"in an attempt to improve the indexing time, but you could run out of file "
+"descriptors if <literal>mergeFactor</literal> is also large."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:748
+#, no-c-format
+msgid ""
+"Boolean parameter, use \"<literal>true</literal>\" or \"<literal>false</"
+"literal>\". The default value for this option is <literal>true</literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:753
+#, no-c-format
+msgid "true"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:759
+#, no-c-format
+msgid ""
+"To tune the indexing speed it might be useful to time the object loading "
+"from database in isolation from the writes to the index. To achieve this set "
+"the <literal>blackhole</literal> as worker backend and start you indexing "
+"routines. This backend does not disable Hibernate Search: it will still "
+"generate the needed changesets to the index, but will discard them instead "
+"of flushing them to the index. As opposite to setting the <literal>hibernate."
+"search.indexing_strategy</literal> to <literal>manual</literal> when using "
+"<literal>blackhole</literal> it will possibly load more data to rebuild the "
+"index from associated entities."
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:769
+#, no-c-format
+msgid "hibernate.search.worker.backend blackhole"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:771
+#, no-c-format
+msgid ""
+"The recommended approach is to focus first on optimizing the object loading, "
+"and then use the timings you achieve as a baseline to tune the indexing "
+"process."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:773
+#, no-c-format
+msgid ""
+"The <literal>blackhole</literal> backend is not meant to be used in "
+"production, only as a tool to identify indexing bottlenecks."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:781
+#, no-c-format
+msgid "LockFactory configuration"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:785
+#, no-c-format
+msgid ""
+"Lucene Directories have default locking strategies which work well for most "
+"cases, but it's possible to specify for each index managed by Hibernate "
+"Search which LockingFactory you want to use."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:789
+#, no-c-format
+msgid ""
+"Some of these locking strategies require a filesystem level lock and may be "
+"used even on RAM based indexes, but this is not recommended and of no "
+"practical use."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:795
+#, no-c-format
+msgid ""
+"To select a locking factory, set the <literal>hibernate.search.<index>."
+"locking_strategy</literal> option to one of <literal>simple</literal>, "
+"<literal>native</literal>, <literal>single</literal> or <literal>none</"
+"literal>, or set it to the fully qualified name of an implementation of "
+"<literal>org.hibernate.search.store.LockFactoryFactory</literal>; "
+"Implementing this interface you can provide a custom <literal>org.apache."
+"lucene.store.LockFactory</literal>."
+msgstr ""
+
+#. Tag: title
+#: configuration.xml:804
+#, no-c-format
+msgid "List of available LockFactory implementations"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:809
+#, no-c-format
+msgid "name"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:819
+#, no-c-format
+msgid "simple"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:821
+#, no-c-format
+msgid "org.apache.lucene.store.SimpleFSLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:824
+#, no-c-format
+msgid ""
+"Safe implementation based on Java's File API, it marks the usage of the "
+"index by creating a marker file."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:827
+#, no-c-format
+msgid ""
+"If for some reason you had to kill your application, you will need to remove "
+"this file before restarting it."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:830
+#, no-c-format
+msgid ""
+"This is the default implementation for <literal>FSDirectoryProvider</"
+"literal>,<literal>FSMasterDirectoryProvider</literal> and "
+"<literal>FSSlaveDirectoryProvider</literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:837
+#, no-c-format
+msgid "native"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:839
+#, no-c-format
+msgid "org.apache.lucene.store.NativeFSLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:842
+#, no-c-format
+msgid ""
+"As does <literal>simple</literal> this also marks the usage of the index by "
+"creating a marker file, but this one is using native OS file locks so that "
+"even if your application crashes the locks will be cleaned up."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:847
+#, no-c-format
+msgid "This implementation has known problems on NFS."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:852
+#, no-c-format
+msgid "single"
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:854
+#, no-c-format
+msgid "org.apache.lucene.store.SingleInstanceLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:857
+#, no-c-format
+msgid ""
+"This LockFactory doesn't use a file marker but is a Java object lock held in "
+"memory; therefore it's possible to use it only when you are sure the index "
+"is not going to be shared by any other process."
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:862
+#, no-c-format
+msgid ""
+"This is the default implementation for <literal>RAMDirectoryProvider</"
+"literal>."
+msgstr ""
+
+#. Tag: entry
+#: configuration.xml:870
+#, no-c-format
+msgid "org.apache.lucene.store.NoLockFactory"
+msgstr ""
+
+#. Tag: para
+#: configuration.xml:873
+#, no-c-format
+msgid ""
+"All changes to this index are not coordinated by any lock; test your "
+"application carefully and make sure you know what it means."
+msgstr ""
+
+#. Tag: section
+#: configuration.xml:880
+#, no-c-format
+msgid "Configuration example:"
+msgstr ""
+
+#. Tag: programlisting
+#: configuration.xml:884
+#, no-c-format
+msgid ""
+"hibernate.search.default.locking_strategy simple\n"
+"hibernate.search.Animals.locking_strategy native\n"
+"hibernate.search.Books.locking_strategy org.custom.components."
+"MyLockingFactory"
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/configuration.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/getting-started.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/getting-started.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/getting-started.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,873 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: getting-started.xml:30
+#, no-c-format
+msgid "Getting started"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:32
+#, no-c-format
+msgid ""
+"Welcome to Hibernate Search! The following chapter will guide you through "
+"the initial steps required to integrate Hibernate Search into an existing "
+"Hibernate enabled application. In case you are a Hibernate new timer we "
+"recommend you start <ulink url=\"http://hibernate.org/152.html\">here</"
+"ulink>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:39
+#, no-c-format
+msgid "System Requirements"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:42
+#, no-c-format
+msgid "System requirements"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:47
+#, no-c-format
+msgid "Java Runtime"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:49
+#, no-c-format
+msgid ""
+"A JDK or JRE version <emphasis>5</emphasis> or greater. You can download a "
+"Java Runtime for Windows/Linux/Solaris <ulink url=\"http://java.sun.com/"
+"javase/downloads/\">here</ulink>."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:55
+#, no-c-format
+msgid "Hibernate Search"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:57
+#, no-c-format
+msgid ""
+"<literal>hibernate-search.jar</literal> and all runtime dependencies from "
+"the <literal>lib</literal> directory of the Hibernate Search distribution. "
+"Please refer to <filename>README.txt </filename>in the lib directory to "
+"understand which dependencies are required."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:65
+#, no-c-format
+msgid "Hibernate Core"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:67
+#, no-c-format
+msgid ""
+"This instructions have been tested against Hibernate 3.3.x. You will need "
+"<literal>hibernate-core.jar</literal> and its transitive dependencies from "
+"the <literal>lib</literal> directory of the distribution. Refer to "
+"<literal>README.txt</literal> in the <literal>lib</literal> directory of the "
+"distribution to determine the minimum runtime requirements."
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:76
+#, no-c-format
+msgid "Hibernate Annotations"
+msgstr ""
+
+#. Tag: entry
+#: getting-started.xml:78
+#, no-c-format
+msgid ""
+"Even though Hibernate Search can be used without Hibernate Annotations the "
+"following instructions will use them for basic entity configuration "
+"(<emphasis>@Entity, @Id, @OneToMany,...</emphasis>). This part of the "
+"configuration could also be expressed in xml or code. However, Hibernate "
+"Search itself has its own set of annotations (<emphasis>@Indexed, "
+"@DocumentId, @Field,...</emphasis>) for which there exists so far no "
+"alternative configuration. The tutorial is tested against version 3.4.x of "
+"Hibernate Annotations."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:92
+#, no-c-format
+msgid ""
+"You can download all dependencies from the Hibernate <ulink url=\"http://www."
+"hibernate.org/6.html\">download site</ulink>. You can also verify the "
+"dependency versions against the <ulink url=\"http://www.hibernate.org/6."
+"html#A3\">Hibernate Compatibility Matrix</ulink>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:100
+#, no-c-format
+msgid "Using Maven"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:102
+#, no-c-format
+msgid ""
+"Instead of managing all dependencies manually, maven users have the "
+"possibility to use the <ulink url=\"http://repository.jboss.com/maven2"
+"\">JBoss maven repository</ulink>. Just add the JBoss repository url to the "
+"<emphasis>repositories</emphasis> section of your <filename>pom.xml</"
+"filename> or <filename>settings.xml</filename>:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:110
+#, no-c-format
+msgid "Adding the JBoss maven repository to <filename>settings.xml</filename>"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:113
+#, no-c-format
+msgid ""
+"<repository>\n"
+" <id>repository.jboss.org</id>\n"
+" <name>JBoss Maven Repository</name>\n"
+" <url>http://repository.jboss.org/maven2</url>\n"
+" <layout>default</layout>\n"
+"</repository>"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:116
+#, no-c-format
+msgid "Then add the following dependencies to your pom.xml:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:119
+#, no-c-format
+msgid "Maven dependencies for Hibernate Search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:121
+#, no-c-format
+msgid ""
+"<dependency>\n"
+" <groupId>org.hibernate</groupId>\n"
+" <artifactId>hibernate-search</artifactId>\n"
+" <version>3.1.0.GA</version>\n"
+"</dependency>\n"
+"<dependency>\n"
+" <groupId>org.hibernate</groupId>\n"
+" <artifactId>hibernate-annotations</artifactId>\n"
+" <version>3.4.0.GA</version>\n"
+"</dependency>\n"
+"<dependency>\n"
+" <groupId>org.hibernate</groupId>\n"
+" <artifactId>hibernate-entitymanager</artifactId>\n"
+" <version>3.4.0.GA</version>\n"
+"</dependency>\n"
+"<dependency>\n"
+" <groupId>org.apache.solr</groupId>\n"
+" <artifactId>solr-common</artifactId>\n"
+" <version>1.3.0</version>\n"
+"</dependency>\n"
+"<dependency>\n"
+" <groupId>org.apache.solr</groupId>\n"
+" <artifactId>solr-core</artifactId>\n"
+" <version>1.3.0</version>\n"
+"</dependency>\n"
+"<dependency>\n"
+" <groupId>org.apache.lucene</groupId>\n"
+" <artifactId>lucene-snowball</artifactId>\n"
+" <version>2.4.0</version>\n"
+"</dependency>"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:124
+#, no-c-format
+msgid ""
+"Not all dependencies are required. Only the <emphasis>hibernate-search</"
+"emphasis> dependency is mandatory. This dependency, together with its "
+"required transitive dependencies, contain all required classes needed to use "
+"Hibernate Search. <emphasis>hibernate-annotations</emphasis> is only needed "
+"if you want to use annotations to configure your domain model as we do in "
+"this tutorial. However, even if you choose not to use Hibernate Annotations "
+"you still have to use the Hibernate Search specific annotations, which are "
+"bundled with the hibernate-search jar file, to configure your Lucene index. "
+"Currently there is no XML configuration available for Hibernate Search. "
+"<emphasis>hibernate-entitymanager</emphasis> is required if you want to use "
+"Hibernate Search in conjunction with JPA. The Solr dependencies are needed "
+"if you want to utilize Solr's analyzer framework. More about this later. And "
+"finally, the <literal>lucene-snowball</literal> dependency is needed if you "
+"want to use Lucene's snowball stemmer."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:142
+#, no-c-format
+msgid "Configuration"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:144
+#, no-c-format
+msgid ""
+"Once you have downloaded and added all required dependencies to your "
+"application you have to add a couple of properties to your hibernate "
+"configuration file. If you are using Hibernate directly this can be done in "
+"<literal>hibernate.properties</literal> or <literal>hibernate.cfg.xml</"
+"literal>. If you are using Hibernate via JPA you can also add the properties "
+"to <literal>persistence.xml</literal>. The good news is that for standard "
+"use most properties offer a sensible default. An example "
+"<filename>persistence.xml</filename> configuration could look like this:"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:155
+#, no-c-format
+msgid ""
+"Basic configuration options to be added to <literal><filename>hibernate."
+"properties</filename></literal>, <literal><filename>hibernate.cfg.xml</"
+"filename></literal> or <filename>persistence.xml</filename>"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:160
+#, no-c-format
+msgid ""
+"...\n"
+"<property name=\"hibernate.search.default.directory_provider\" \n"
+" value=\"org.hibernate.search.store.FSDirectoryProvider\"/> \n"
+"\n"
+"<property name=\"hibernate.search.default.indexBase\" value=\"/var/lucene/"
+"indexes\"/> \n"
+"..."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:163
+#, no-c-format
+msgid ""
+"First you have to tell Hibernate Search which <classname>DirectoryProvider</"
+"classname> to use. This can be achieved by setting the <literal>hibernate."
+"search.default.directory_provider</literal> property. Apache Lucene has the "
+"notion of a <literal>Directory</literal> to store the index files. Hibernate "
+"Search handles the initialization and configuration of a Lucene "
+"<literal>Directory</literal> instance via a <literal>DirectoryProvider</"
+"literal>. In this tutorial we will use a subclass of "
+"<literal>DirectoryProvider</literal> called <classname>FSDirectoryProvider</"
+"classname>. This will give us the ability to physically inspect the Lucene "
+"indexes created by Hibernate Search (eg via <ulink url=\"http://www.getopt."
+"org/luke/\">Luke</ulink>). Once you have a working configuration you can "
+"start experimenting with other directory providers (see <xref linkend="
+"\"search-configuration-directory\"/>). Next to the directory provider you "
+"also have to specify the default root directory for all indexes via "
+"<literal>hibernate.search.default.indexBase</literal>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:180
+#, no-c-format
+msgid ""
+"Lets assume that your application contains the Hibernate managed classes "
+"<classname>example.Book</classname> and <classname>example.Author</"
+"classname> and you want to add free text search capabilities to your "
+"application in order to search the books contained in your database."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:187
+#, no-c-format
+msgid ""
+"Example entities Book and Author before adding Hibernate Search specific "
+"annotations"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:190
+#, no-c-format
+msgid ""
+"package example;\n"
+"...\n"
+"@Entity\n"
+"public class Book {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" private Integer id; \n"
+"\n"
+" private String title; \n"
+"\n"
+" private String subtitle; \n"
+"\n"
+" @ManyToMany \n"
+" private Set<Author> authors = new HashSet<Author>();\n"
+"\n"
+" private Date publicationDate;\n"
+" \n"
+" public Book() {\n"
+" } \n"
+" \n"
+" // standard getters/setters follow here\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:192
+#, no-c-format
+msgid ""
+"package example;\n"
+"...\n"
+"@Entity\n"
+"public class Author {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" private Integer id;\n"
+"\n"
+" private String name;\n"
+"\n"
+" public Author() {\n"
+" } \n"
+" \n"
+" // standard getters/setters follow here\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:195
+#, no-c-format
+msgid ""
+"To achieve this you have to add a few annotations to the <classname>Book</"
+"classname> and <classname>Author</classname> class. The first annotation "
+"<literal>@Indexed</literal> marks <classname>Book</classname> as indexable. "
+"By design Hibernate Search needs to store an untokenized id in the index to "
+"ensure index unicity for a given entity. <literal>@DocumentId</literal> "
+"marks the property to use for this purpose and is in most cases the same as "
+"the database primary key. In fact since the 3.1.0 release of Hibernate "
+"Search <literal>@DocumentId</literal> is optional in the case where an "
+"<classname>@Id</classname> annotation exists."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:206
+#, no-c-format
+msgid ""
+"Next you have to mark the fields you want to make searchable. Let's start "
+"with <literal>title</literal> and <literal>subtitle</literal> and annotate "
+"both with <literal>@Field</literal>. The parameter <literal>index=Index."
+"TOKENIZED</literal> will ensure that the text will be tokenized using the "
+"default Lucene analyzer. Usually, tokenizing means chunking a sentence into "
+"individual words and potentially excluding common words like <literal>'a'</"
+"literal> or '<literal>the</literal>'. We will talk more about analyzers a "
+"little later on. The second parameter we specify within <literal>@Field</"
+"literal>,<literal> store=Store.NO</literal>, ensures that the actual data "
+"will not be stored in the index. Whether this data is stored in the index or "
+"not has nothing to do with the ability to search for it. From Lucene's "
+"perspective it is not necessary to keep the data once the index is created. "
+"The benefit of storing it is the ability to retrieve it via projections "
+"(<xref linkend=\"projections\"/>)."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:222
+#, no-c-format
+msgid ""
+"Without projections, Hibernate Search will per default execute a Lucene "
+"query in order to find the database identifiers of the entities matching the "
+"query critera and use these identifiers to retrieve managed objects from the "
+"database. The decision for or against projection has to be made on a case to "
+"case basis. The default behaviour - <literal>Store.NO</literal> - is "
+"recommended since it returns managed objects whereas projections only return "
+"object arrays."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:230
+#, no-c-format
+msgid ""
+"After this short look under the hood let's go back to annotating the "
+"<classname>Book</classname> class. Another annotation we have not yet "
+"discussed is <literal>@DateBridge</literal>. This annotation is one of the "
+"built-in field bridges in Hibernate Search. The Lucene index is purely "
+"string based. For this reason Hibernate Search must convert the data types "
+"of the indexed fields to strings and vice versa. A range of predefined "
+"bridges are provided, including the <classname>DateBridge</classname> which "
+"will convert a <classname>java.util.Date</classname> into a "
+"<classname>String</classname> with the specified resolution. For more "
+"details see <xref linkend=\"search-mapping-bridge\"/>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:241
+#, no-c-format
+msgid ""
+"This leaves us with <literal>@IndexedEmbedded. </literal>This annotation is "
+"used to index associated entities (<literal>@ManyToMany</literal>, "
+"<literal>@*ToOne</literal> and <literal>@Embedded</literal>) as part of the "
+"owning entity. This is needed since a Lucene index document is a flat data "
+"structure which does not know anything about object relations. To ensure "
+"that the authors' name wil be searchable you have to make sure that the "
+"names are indexed as part of the book itself. On top of "
+"<literal>@IndexedEmbedded</literal> you will also have to mark all fields of "
+"the associated entity you want to have included in the index with "
+"<literal>@Indexed</literal>. For more details see <xref linkend=\"search-"
+"mapping-associated\"/>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:253
+#, no-c-format
+msgid ""
+"These settings should be sufficient for now. For more details on entity "
+"mapping refer to <xref linkend=\"search-mapping-entity\"/>."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:257
+#, no-c-format
+msgid "Example entities after adding Hibernate Search annotations"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:260
+#, no-c-format
+msgid ""
+"package example;\n"
+"...\n"
+"@Entity\n"
+"<emphasis role=\"bold\">@Indexed</emphasis>\n"
+"public class Book {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" <emphasis role=\"bold\">@DocumentId</emphasis>\n"
+" private Integer id;\n"
+" \n"
+" <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</"
+"emphasis>\n"
+" private String title;\n"
+" \n"
+" <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</"
+"emphasis>\n"
+" private String subtitle; \n"
+"\n"
+" <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+" @ManyToMany \n"
+" private Set<Author> authors = new HashSet<Author>();\n"
+"\n"
+"<emphasis role=\"bold\"> @Field(index = Index.UN_TOKENIZED, store = Store."
+"YES)\n"
+" @DateBridge(resolution = Resolution.DAY)</emphasis>\n"
+" private Date publicationDate;\n"
+" \n"
+" public Book() {\n"
+" } \n"
+" \n"
+" // standard getters/setters follow here\n"
+" ... \n"
+"}"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:262
+#, no-c-format
+msgid ""
+"package example;\n"
+"...\n"
+"@Entity\n"
+"public class Author {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" private Integer id;\n"
+"\n"
+" <emphasis role=\"bold\">@Field(index=Index.TOKENIZED, store=Store.NO)</"
+"emphasis>\n"
+" private String name;\n"
+"\n"
+" public Author() {\n"
+" } \n"
+" \n"
+" // standard getters/setters follow here\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:267
+#, no-c-format
+msgid "Indexing"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:269
+#, no-c-format
+msgid ""
+"Hibernate Search will transparently index every entity persisted, updated or "
+"removed through Hibernate Core. However, you have to trigger an initial "
+"indexing to populate the Lucene index with the data already present in your "
+"database. Once you have added the above properties and annotations it is "
+"time to trigger an initial batch index of your books. You can achieve this "
+"by using one of the following code snippets (see also <xref linkend=\"search-"
+"batchindex\"/>):"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:278
+#, no-c-format
+msgid "Using Hibernate Session to index data"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:280
+#, no-c-format
+msgid ""
+"FullTextSession fullTextSession = Search.getFullTextSession(session);\n"
+"Transaction tx = fullTextSession.beginTransaction();\n"
+"\n"
+"List books = session.createQuery(\"from Book as book\").list();\n"
+"for (Book book : books) {\n"
+" <emphasis role=\"bold\">fullTextSession.index(book);</emphasis>\n"
+"}\n"
+"\n"
+"tx.commit(); //index is written at commit time"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:284
+#, no-c-format
+msgid "Using JPA to index data"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:286
+#, no-c-format
+msgid ""
+"EntityManager em = entityManagerFactory.createEntityManager();\n"
+"FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager"
+"(em);\n"
+"em.getTransaction().begin();\n"
+"\n"
+"List books = em.createQuery(\"select book from Book as book\").getResultList"
+"();\n"
+"for (Book book : books) {\n"
+" <emphasis role=\"bold\">fullTextEntityManager.index(book);</emphasis>\n"
+"} \n"
+"\n"
+"em.getTransaction().commit();\n"
+"em.close();"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:289
+#, no-c-format
+msgid ""
+"After executing the above code, you should be able to see a Lucene index "
+"under <literal>/var/lucene/indexes/example.Book</literal>. Go ahead an "
+"inspect this index with <ulink url=\"http://www.getopt.org/luke/\">Luke</"
+"ulink>. It will help you to understand how Hibernate Search works."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:297
+#, no-c-format
+msgid "Searching"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:299
+#, no-c-format
+msgid ""
+"Now it is time to execute a first search. The general approach is to create "
+"a native Lucene query and then wrap this query into a org.hibernate.Query in "
+"order to get all the functionality one is used to from the Hibernate API. "
+"The following code will prepare a query against the indexed fields, execute "
+"it and return a list of <classname>Book</classname>s."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:307
+#, no-c-format
+msgid "Using Hibernate Session to create and execute a search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:309
+#, no-c-format
+msgid ""
+"FullTextSession fullTextSession = Search.getFullTextSession(session);\n"
+"Transaction tx = fullTextSession.beginTransaction();\n"
+"\n"
+"// create native Lucene query\n"
+"String[] fields = new String[]{\"title\", \"subtitle\", \"authors.name\", "
+"\"publicationDate\"};\n"
+"MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new "
+"StandardAnalyzer());\n"
+"org.apache.lucene.search.Query query = parser.parse( \"Java rocks!\" );\n"
+"\n"
+"// wrap Lucene query in a org.hibernate.Query\n"
+"org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery(query, "
+"Book.class);\n"
+"\n"
+"// execute search\n"
+"List result = hibQuery.list();\n"
+" \n"
+"tx.commit();\n"
+"session.close();"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:313
+#, no-c-format
+msgid "Using JPA to create and execute a search"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:315
+#, no-c-format
+msgid ""
+"EntityManager em = entityManagerFactory.createEntityManager();\n"
+"FullTextEntityManager fullTextEntityManager = \n"
+" org.hibernate.hibernate.search.jpa.Search.getFullTextEntityManager(em);\n"
+"em.getTransaction().begin();\n"
+"\n"
+"// create native Lucene query\n"
+"String[] fields = new String[]{\"title\", \"subtitle\", \"authors.name\", "
+"\"publicationDate\"};\n"
+"MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new "
+"StandardAnalyzer());\n"
+"org.apache.lucene.search.Query query = parser.parse( \"Java rocks!\" );\n"
+"\n"
+"// wrap Lucene query in a javax.persistence.Query\n"
+"javax.persistence.Query persistenceQuery = fullTextEntityManager."
+"createFullTextQuery(query, Book.class);\n"
+"\n"
+"// execute search\n"
+"List result = persistenceQuery.getResultList();\n"
+"\n"
+"em.getTransaction().commit();\n"
+"em.close();"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:320
+#, no-c-format
+msgid "Analyzer"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:322
+#, no-c-format
+msgid ""
+"Let's make things a little more interesting now. Assume that one of your "
+"indexed book entities has the title \"Refactoring: Improving the Design of "
+"Existing Code\" and you want to get hits for all of the following queries: "
+"\"refactor\", \"refactors\", \"refactored\" and \"refactoring\". In Lucene "
+"this can be achieved by choosing an analyzer class which applies word "
+"stemming during the indexing <emphasis role=\"bold\">as well as</emphasis> "
+"search process. Hibernate Search offers several ways to configure the "
+"analyzer to use (see <xref linkend=\"analyzer\"/>):"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:333
+#, no-c-format
+msgid ""
+"Setting the <literal>hibernate.search.analyzer</literal> property in the "
+"configuration file. The specified class will then be the default analyzer."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:339
+#, no-c-format
+msgid ""
+"Setting the <literal><literal>@Analyzer</literal></literal> annotation at "
+"the entity level."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:344
+#, no-c-format
+msgid ""
+"Setting the <literal>@<literal>Analyzer</literal></literal> annotation at "
+"the field level."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:349
+#, no-c-format
+msgid ""
+"When using the <literal>@Analyzer</literal> annotation one can either "
+"specify the fully qualified classname of the analyzer to use or one can "
+"refer to an analyzer definition defined by the <literal>@AnalyzerDef</"
+"literal> annotation. In the latter case the Solr analyzer framework with its "
+"factories approach is utilized. To find out more about the factory classes "
+"available you can either browse the Solr JavaDoc or read the corresponding "
+"section on the <ulink url=\"http://wiki.apache.org/solr/"
+"AnalyzersTokenizersTokenFilters\">Solr Wiki.</ulink> Note that depending on "
+"the chosen factory class additional libraries on top of the Solr "
+"dependencies might be required. For example, the "
+"<classname>PhoneticFilterFactory</classname> depends on <ulink url=\"http://"
+"commons.apache.org/codec\">commons-codec</ulink>."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:362
+#, no-c-format
+msgid ""
+"In the example below a <classname>StandardTokenizerFactory</classname> is "
+"used followed by two filter factories, <classname>LowerCaseFilterFactory</"
+"classname> and <classname>SnowballPorterFilterFactory</classname>. The "
+"standard tokenizer splits words at punctuation characters and hyphens while "
+"keeping email addresses and internet hostnames intact. It is a good general "
+"purpose tokenizer. The lowercase filter lowercases the letters in each token "
+"whereas the snowball filter finally applies language specific stemming."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:372
+#, no-c-format
+msgid ""
+"Generally, when using the Solr framework you have to start with a tokenizer "
+"followed by an arbitrary number of filters."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:376
+#, no-c-format
+msgid ""
+"Using <classname>@AnalyzerDef</classname> and the Solr framework to define "
+"and use an analyzer"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:379
+#, no-c-format
+msgid ""
+"package example;\n"
+"...\n"
+"@Entity\n"
+"@Indexed\n"
+"<emphasis role=\"bold\">@AnalyzerDef(name = \"customanalyzer\",\n"
+" tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+" filters = {\n"
+" @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+" @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {\n"
+" @Parameter(name = \"language\", value = \"English\")\n"
+" })\n"
+" })</emphasis>\n"
+"public class Book {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Integer id;\n"
+" \n"
+" @Field(index=Index.TOKENIZED, store=Store.NO)\n"
+" <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")</"
+"emphasis>\n"
+" private String title;\n"
+" \n"
+" @Field(index=Index.TOKENIZED, store=Store.NO)\n"
+" <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")</"
+"emphasis>\n"
+" private String subtitle; \n"
+"\n"
+" @IndexedEmbedded\n"
+" @ManyToMany \n"
+" private Set<Author> authors = new HashSet<Author>();\n"
+"\n"
+"<emphasis role=\"bold\"> </emphasis> @Field(index = Index.UN_TOKENIZED, "
+"store = Store.YES)\n"
+" @DateBridge(resolution = Resolution.DAY)\n"
+" private Date publicationDate;\n"
+" \n"
+" public Book() {\n"
+" } \n"
+" \n"
+" // standard getters/setters follow here\n"
+" ... \n"
+"}"
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:384
+#, no-c-format
+msgid "What's next"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:386
+#, no-c-format
+msgid ""
+"The above paragraphs hopefully helped you getting an overview of Hibernate "
+"Search. Using the maven archetype plugin and the following command you can "
+"create an initial runnable maven project structure populated with the "
+"example code of this tutorial."
+msgstr ""
+
+#. Tag: title
+#: getting-started.xml:392
+#, no-c-format
+msgid "Using the Maven archetype to create tutorial sources"
+msgstr ""
+
+#. Tag: programlisting
+#: getting-started.xml:394
+#, no-c-format
+msgid ""
+"mvn archetype:create \\ \n"
+" -DarchetypeGroupId=org.hibernate \\\n"
+" -DarchetypeArtifactId=hibernate-search-quickstart \\ \n"
+" -DarchetypeVersion=3.1.0.GA \\\n"
+" -DgroupId=my.company -DartifactId=quickstart"
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:397
+#, no-c-format
+msgid ""
+"Using the maven project you can execute the examples, inspect the file "
+"system based index and search and retrieve a list of managed objects. Just "
+"run <emphasis>mvn package</emphasis> to compile the sources and run the unit "
+"tests."
+msgstr ""
+
+#. Tag: para
+#: getting-started.xml:402
+#, no-c-format
+msgid ""
+"The next step after this tutorial is to get more familiar with the overall "
+"architecture of Hibernate Search (<xref linkend=\"search-architecture\"/>) "
+"and explore the basic features in more detail. Two topics which were only "
+"briefly touched in this tutorial were analyzer configuration (<xref linkend="
+"\"analyzer\"/>) and field bridges (<xref linkend=\"search-mapping-bridge\"/"
+">), both important features required for more fine-grained indexing. More "
+"advanced topics cover clustering (<xref linkend=\"jms-backend\"/>) and large "
+"indexes handling (<xref linkend=\"search-configuration-directory-sharding\"/"
+">)."
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/getting-started.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/lucene-native.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/lucene-native.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/lucene-native.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,320 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: lucene-native.xml:30
+#, no-c-format
+msgid "Advanced features"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:33
+#, no-c-format
+msgid "SearchFactory"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:35
+#, no-c-format
+msgid ""
+"The <classname>SearchFactory</classname> object keeps track of the "
+"underlying Lucene resources for Hibernate Search, it's also a convenient way "
+"to access Lucene natively. The <classname>SearchFactory</classname> can be "
+"accessed from a <classname>FullTextSession</classname>:"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:41
+#, no-c-format
+msgid "Accessing the <classname>SearchFactory</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:43
+#, no-c-format
+msgid ""
+"FullTextSession fullTextSession = Search.getFullTextSession"
+"(regularSession);\n"
+"SearchFactory searchFactory = fullTextSession.getSearchFactory();"
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:48
+#, no-c-format
+msgid "Accessing a Lucene Directory"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:50
+#, no-c-format
+msgid ""
+"You can always access the Lucene directories through plain Lucene, the "
+"Directory structure is in no way different with or without Hibernate Search. "
+"However there are some more convenient ways to access a given Directory. The "
+"<classname>SearchFactory</classname> keeps track of the "
+"<classname>DirectoryProvider</classname>s per indexed class. One directory "
+"provider can be shared amongst several indexed classes if the classes share "
+"the same underlying index directory. While usually not the case, a given "
+"entity can have several <classname>DirectoryProvider</classname>s if the "
+"index is sharded (see <xref linkend=\"search-configuration-directory-sharding"
+"\"/>)."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:62
+#, no-c-format
+msgid "Accessing the Lucene <classname>Directory</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:64
+#, no-c-format
+msgid ""
+"DirectoryProvider[] provider = searchFactory.getDirectoryProviders(Order."
+"class);\n"
+"org.apache.lucene.store.Directory directory = provider[0].getDirectory();"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:67
+#, no-c-format
+msgid ""
+"In this example, directory points to the lucene index storing "
+"<classname>Order</classname>s information. Note that the obtained Lucene "
+"directory must not be closed (this is Hibernate Search responsibility)."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:74
+#, no-c-format
+msgid "Using an IndexReader"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:76
+#, no-c-format
+msgid ""
+"Queries in Lucene are executed on an <literal>IndexReader</literal>. "
+"Hibernate Search caches all index readers to maximize performance. Your code "
+"can access this cached resources, but you have to follow some \"good citizen"
+"\" rules."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:82
+#, no-c-format
+msgid "Accessing an <classname>IndexReader</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: lucene-native.xml:84
+#, no-c-format
+msgid ""
+"DirectoryProvider orderProvider = searchFactory.getDirectoryProviders(Order."
+"class)[0];\n"
+"DirectoryProvider clientProvider = searchFactory.getDirectoryProviders"
+"(Client.class)[0];\n"
+"\n"
+"ReaderProvider readerProvider = searchFactory.getReaderProvider();\n"
+"IndexReader reader = readerProvider.openReader(orderProvider, "
+"clientProvider);\n"
+"\n"
+"try {\n"
+" //do read-only operations on the reader\n"
+"}\n"
+"finally {\n"
+" readerProvider.closeReader(reader);\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:87
+#, no-c-format
+msgid ""
+"The ReaderProvider (described in <xref linkend=\"search-architecture-"
+"readerstrategy\"/>), will open an IndexReader on top of the index(es) "
+"referenced by the directory providers. Because this <classname>IndexReader</"
+"classname> is shared amongst several clients, you must adhere to the "
+"following rules:"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:95
+#, no-c-format
+msgid ""
+"Never call indexReader.close(), but always call readerProvider.closeReader"
+"(reader), preferably in a finally block."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:101
+#, no-c-format
+msgid ""
+"Don't use this <classname>IndexReader</classname> for modification "
+"operations (you would get an exception). If you want to use a read/write "
+"index reader, open one from the Lucene Directory object."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:108
+#, no-c-format
+msgid ""
+"Aside from those rules, you can use the IndexReader freely, especially to do "
+"native queries. Using the shared <literal>IndexReader</literal>s will make "
+"most queries more efficient."
+msgstr ""
+
+#. Tag: title
+#: lucene-native.xml:115
+#, no-c-format
+msgid "Customizing Lucene's scoring formula"
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:117
+#, no-c-format
+msgid ""
+"Lucene allows the user to customize its scoring formula by extending "
+"<classname>org.apache.lucene.search.Similarity</classname>. The abstract "
+"methods defined in this class match the factors of the following formula "
+"calculating the score of query q for document d:"
+msgstr ""
+
+#. Tag: emphasis
+#: lucene-native.xml:122
+#, no-c-format
+msgid ""
+"score(q,d) = coord(q,d) · queryNorm(q) · ∑<subscript>t in q</subscript> ( tf"
+"(t in d) · idf(t)<superscript>2</superscript> · t.getBoost() · norm(t,d) )"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:131
+#, no-c-format
+msgid "Factor"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:133
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:139
+#, no-c-format
+msgid "tf(t ind)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:141
+#, no-c-format
+msgid "Term frequency factor for the term (t) in the document (d)."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:146
+#, no-c-format
+msgid "idf(t)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:148
+#, no-c-format
+msgid "Inverse document frequency of the term."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:152
+#, no-c-format
+msgid "coord(q,d)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:154
+#, no-c-format
+msgid ""
+"Score factor based on how many of the query terms are found in the specified "
+"document."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:159
+#, no-c-format
+msgid "queryNorm(q)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:161
+#, no-c-format
+msgid "Normalizing factor used to make scores between queries comparable."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:166
+#, no-c-format
+msgid "t.getBoost()"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:168
+#, no-c-format
+msgid "Field boost."
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:172
+#, no-c-format
+msgid "norm(t,d)"
+msgstr ""
+
+#. Tag: entry
+#: lucene-native.xml:174
+#, no-c-format
+msgid "Encapsulates a few (indexing time) boost and length factors."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:178
+#, no-c-format
+msgid ""
+"It is beyond the scope of this manual to explain this formula in more "
+"detail. Please refer to <classname>Similarity</classname>'s Javadocs for "
+"more information."
+msgstr ""
+
+#. Tag: para
+#: lucene-native.xml:183
+#, no-c-format
+msgid ""
+"Hibernate Search provides two ways to modify Lucene's similarity "
+"calculation. First you can set the default similarity by specifying the "
+"fully specified classname of your <classname>Similarity</classname> "
+"implementation using the property <constant>hibernate.search.similarity</"
+"constant>. The default value is <classname>org.apache.lucene.search."
+"DefaultSimilarity</classname>. Additionally you can override the default "
+"similarity on class level using the <literal>@Similarity</literal> "
+"annotation.<programlisting>@Entity\n"
+"@Indexed\n"
+"<emphasis role=\"bold\">@Similarity(impl = DummySimilarity.class)</"
+"emphasis>\n"
+"public class Book {\n"
+" ...\n"
+"}</programlisting>As an example, let's assume it is not important how often "
+"a term appears in a document. Documents with a single occurrence of the term "
+"should be scored the same as documents with multiple occurrences. In this "
+"case your custom implementation of the method <methodname>tf(float freq)</"
+"methodname> should return 1.0."
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/lucene-native.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/mapping.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/mapping.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/mapping.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,2081 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 16:41+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: mapping.xml:30
+#, no-c-format
+msgid "Mapping entities to the index structure"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:32
+#, no-c-format
+msgid ""
+"All the metadata information needed to index entities is described through "
+"annotations. There is no need for xml mapping files. In fact there is "
+"currently no xml configuration option available (see <ulink url=\"http://"
+"opensource.atlassian.com/projects/hibernate/browse/HSEARCH-210\">HSEARCH-"
+"210</ulink>). You can still use hibernate mapping files for the basic "
+"Hibernate configuration, but the Search specific configuration has to be "
+"expressed via annotations."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:41
+#, no-c-format
+msgid "Mapping an entity"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:44
+#, no-c-format
+msgid "Basic mapping"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:46
+#, no-c-format
+msgid ""
+"First, we must declare a persistent class as indexable. This is done by "
+"annotating the class with <literal>@Indexed</literal> (all entities not "
+"annotated with <literal>@Indexed</literal> will be ignored by the indexing "
+"process):"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:52
+#, no-c-format
+msgid ""
+"Making a class indexable using the <classname>@Indexed</classname> annotation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:55
+#, no-c-format
+msgid ""
+"@Entity\n"
+"<emphasis role=\"bold\">@Indexed(index=\"indexes/essays\")</emphasis>\n"
+"public class Essay {\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:58
+#, no-c-format
+msgid ""
+"The <literal>index</literal> attribute tells Hibernate what the Lucene "
+"directory name is (usually a directory on your file system). It is "
+"recommended to define a base directory for all Lucene indexes using the "
+"<literal>hibernate.search.default.indexBase</literal> property in your "
+"configuration file. Alternatively you can specify a base directory per "
+"indexed entity by specifying <literal>hibernate.search.<index>."
+"indexBase, </literal>where <literal><index></literal> is the fully "
+"qualified classname of the indexed entity. Each entity instance will be "
+"represented by a Lucene <classname>Document</classname> inside the given "
+"index (aka Directory)."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:70
+#, no-c-format
+msgid ""
+"For each property (or attribute) of your entity, you have the ability to "
+"describe how it will be indexed. The default (no annotation present) means "
+"that the property is completely ignored by the indexing process. "
+"<literal>@Field</literal> does declare a property as indexed. When indexing "
+"an element to a Lucene document you can specify how it is indexed:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:79
+#, no-c-format
+msgid ""
+"<literal>name</literal> : describe under which name, the property should be "
+"stored in the Lucene Document. The default value is the property name "
+"(following the JavaBeans convention)"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:85
+#, no-c-format
+msgid ""
+"<literal>store</literal> : describe whether or not the property is stored in "
+"the Lucene index. You can store the value <literal>Store.YES</literal> "
+"(consuming more space in the index but allowing projection, see <xref "
+"linkend=\"projections\"/> for more information), store it in a compressed "
+"way <literal>Store.COMPRESS</literal> (this does consume more CPU), or avoid "
+"any storage <literal>Store.NO</literal> (this is the default value). When a "
+"property is stored, you can retrieve its original value from the Lucene "
+"Document. This is not related to whether the element is indexed or not."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:98
+#, no-c-format
+msgid ""
+"index: describe how the element is indexed and the type of information "
+"store. The different values are <literal>Index.NO</literal> (no indexing, ie "
+"cannot be found by a query), <literal>Index.TOKENIZED</literal> (use an "
+"analyzer to process the property), <literal>Index.UN_TOKENIZED</literal> (no "
+"analyzer pre-processing), <literal>Index.NO_NORMS</literal> (do not store "
+"the normalization data). The default value is <literal>TOKENIZED</literal>."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:109
+#, no-c-format
+msgid ""
+"termVector: describes collections of term-frequency pairs. This attribute "
+"enables term vectors being stored during indexing so they are available "
+"within documents. The default value is TermVector.NO."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:114
+#, no-c-format
+msgid "The different values of this attribute are:"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:120
+#, no-c-format
+msgid "Value"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:122
+#, no-c-format
+msgid "Definition"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:128
+#, no-c-format
+msgid "TermVector.YES"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:130
+#, no-c-format
+msgid ""
+"Store the term vectors of each document. This produces two synchronized "
+"arrays, one contains document terms and the other contains the term's "
+"frequency."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:136
+#, no-c-format
+msgid "TermVector.NO"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:138
+#, no-c-format
+msgid "Do not store term vectors."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:142
+#, no-c-format
+msgid "TermVector.WITH_OFFSETS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:144
+#, no-c-format
+msgid ""
+"Store the term vector and token offset information. This is the same as "
+"TermVector.YES plus it contains the starting and ending offset position "
+"information for the terms."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:151
+#, no-c-format
+msgid "TermVector.WITH_POSITIONS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:153
+#, no-c-format
+msgid ""
+"Store the term vector and token position information. This is the same as "
+"TermVector.YES plus it contains the ordinal positions of each occurrence of "
+"a term in a document."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:160
+#, no-c-format
+msgid "TermVector.WITH_POSITION_OFFSETS"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:162
+#, no-c-format
+msgid ""
+"Store the term vector, token position and offset information. This is a "
+"combination of the YES, WITH_OFFSETS and WITH_POSITIONS."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:172
+#, no-c-format
+msgid ""
+"Whether or not you want to store the original data in the index depends on "
+"how you wish to use the index query result. For a regular Hibernate Search "
+"usage storing is not necessary. However you might want to store some fields "
+"to subsequently project them (see <xref linkend=\"projections\"/> for more "
+"information)."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:178
+#, no-c-format
+msgid ""
+"Whether or not you want to tokenize a property depends on whether you wish "
+"to search the element as is, or by the words it contains. It make sense to "
+"tokenize a text field, but tokenizing a date field probably not. Note that "
+"fields used for sorting must not be tokenized."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:184
+#, no-c-format
+msgid ""
+"Finally, the id property of an entity is a special property used by "
+"Hibernate Search to ensure index unicity of a given entity. By design, an id "
+"has to be stored and must not be tokenized. To mark a property as index id, "
+"use the <literal>@DocumentId</literal> annotation. If you are using "
+"Hibernate Annotations and you have specified @Id you can omit @DocumentId. "
+"The chosen entity id will also be used as document id."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:193
+#, no-c-format
+msgid ""
+"Adding <classname>@DocumentId</classname> ad <classname>@Field</classname> "
+"annotations to an indexed entity"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:196
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed(index=\"indexes/essays\")\n"
+"public class Essay {\n"
+" ...\n"
+"\n"
+" @Id\n"
+" <emphasis role=\"bold\">@DocumentId</emphasis>\n"
+" public Long getId() { return id; }\n"
+"\n"
+" <emphasis role=\"bold\">@Field(name=\"Abstract\", index=Index.TOKENIZED, "
+"store=Store.YES)</emphasis>\n"
+" public String getSummary() { return summary; }\n"
+"\n"
+" @Lob\n"
+" <emphasis role=\"bold\">@Field(index=Index.TOKENIZED)</emphasis>\n"
+" public String getText() { return text; }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:199
+#, no-c-format
+msgid ""
+"The above annotations define an index with three fields: <literal>id</"
+"literal> , <literal>Abstract</literal> and <literal>text</literal> . Note "
+"that by default the field name is decapitalized, following the JavaBean "
+"specification"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:206
+#, no-c-format
+msgid "Mapping properties multiple times"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:208
+#, no-c-format
+msgid ""
+"Sometimes one has to map a property multiple times per index, with slightly "
+"different indexing strategies. For example, sorting a query by field "
+"requires the field to be <literal>UN_TOKENIZED</literal>. If one wants to "
+"search by words in this property and still sort it, one need to index it "
+"twice - once tokenized and once untokenized. @Fields allows to achieve this "
+"goal."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:216
+#, no-c-format
+msgid "Using @Fields to map a property multiple times"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:218
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed(index = \"Book\" )\n"
+"public class Book {\n"
+" <emphasis role=\"bold\">@Fields( {</emphasis>\n"
+" @Field(index = Index.TOKENIZED),\n"
+" @Field(name = \"summary_forSort\", index = Index.UN_TOKENIZED, "
+"store = Store.YES)\n"
+" <emphasis role=\"bold\">} )</emphasis>\n"
+" public String getSummary() {\n"
+" return summary;\n"
+" }\n"
+"\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:221
+#, no-c-format
+msgid ""
+"The field <literal>summary</literal> is indexed twice, once as "
+"<literal>summary</literal> in a tokenized way, and once as "
+"<literal>summary_forSort</literal> in an untokenized way. @Field supports 2 "
+"attributes useful when @Fields is used:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:228
+#, no-c-format
+msgid ""
+"analyzer: defines a @Analyzer annotation per field rather than per property"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:233
+#, no-c-format
+msgid ""
+"bridge: defines a @FieldBridge annotation per field rather than per property"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:238
+#, no-c-format
+msgid "See below for more information about analyzers and field bridges."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:243
+#, no-c-format
+msgid "Embedded and associated objects"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:245
+#, no-c-format
+msgid ""
+"Associated objects as well as embedded objects can be indexed as part of the "
+"root entity index. This is useful if you expect to search a given entity "
+"based on properties of associated objects. In the following example the aim "
+"is to return places where the associated city is Atlanta (In the Lucene "
+"query parser language, it would translate into <code>address.city:Atlanta</"
+"code>)."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:253
+#, no-c-format
+msgid "Using @IndexedEmbedded to index associations"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:255
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"public class Place {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Long id;\n"
+"\n"
+" @Field( index = Index.TOKENIZED )\n"
+" private String name;\n"
+"\n"
+" @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )\n"
+" <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+" private Address address;\n"
+" ....\n"
+"}\n"
+"\n"
+"@Entity\n"
+"public class Address {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" private Long id;\n"
+"\n"
+" @Field(index=Index.TOKENIZED)\n"
+" private String street;\n"
+"\n"
+" @Field(index=Index.TOKENIZED)\n"
+" private String city;\n"
+"\n"
+" <emphasis role=\"bold\">@ContainedIn</emphasis>\n"
+" @OneToMany(mappedBy=\"address\")\n"
+" private Set<Place> places;\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:258
+#, no-c-format
+msgid ""
+"In this example, the place fields will be indexed in the <literal>Place</"
+"literal> index. The <literal>Place</literal> index documents will also "
+"contain the fields <literal>address.id</literal>, <literal>address.street</"
+"literal>, and <literal>address.city</literal> which you will be able to "
+"query. This is enabled by the <literal>@IndexedEmbedded</literal> annotation."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:265
+#, no-c-format
+msgid ""
+"Be careful. Because the data is denormalized in the Lucene index when using "
+"the <classname>@IndexedEmbedded</classname> technique, Hibernate Search "
+"needs to be aware of any change in the <classname>Place</classname> object "
+"and any change in the <classname>Address</classname> object to keep the "
+"index up to date. To make sure the <literal><classname>Place</classname></"
+"literal> Lucene document is updated when it's <classname>Address</classname> "
+"changes, you need to mark the other side of the bidirectional relationship "
+"with <classname>@ContainedIn</classname>."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:275
+#, no-c-format
+msgid ""
+"<literal>@ContainedIn</literal> is only useful on associations pointing to "
+"entities as opposed to embedded (collection of) objects."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:279
+#, no-c-format
+msgid "Let's make our example a bit more complex:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:282
+#, no-c-format
+msgid ""
+"Nested usage of <classname>@IndexedEmbedded</classname> and "
+"<classname>@ContainedIn</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:285
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"public class Place {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Long id;\n"
+"\n"
+" @Field( index = Index.TOKENIZED )\n"
+" private String name;\n"
+"\n"
+" @OneToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )\n"
+" <emphasis role=\"bold\">@IndexedEmbedded</emphasis>\n"
+" private Address address;\n"
+" ....\n"
+"}\n"
+"\n"
+"@Entity\n"
+"public class Address {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" private Long id;\n"
+"\n"
+" @Field(index=Index.TOKENIZED)\n"
+" private String street;\n"
+"\n"
+" @Field(index=Index.TOKENIZED)\n"
+" private String city;\n"
+"\n"
+" <emphasis role=\"bold\">@IndexedEmbedded(depth = 1, prefix = \"ownedBy_"
+"\")</emphasis>\n"
+" private Owner ownedBy;\n"
+"\n"
+" <emphasis role=\"bold\">@ContainedIn</emphasis>\n"
+" @OneToMany(mappedBy=\"address\")\n"
+" private Set<Place> places;\n"
+" ...\n"
+"}\n"
+"\n"
+"@Embeddable\n"
+"public class Owner {\n"
+" @Field(index = Index.TOKENIZED)\n"
+" private String name;\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:288
+#, no-c-format
+msgid ""
+"Any <literal>@*ToMany, @*ToOne</literal> and <literal>@Embedded</literal> "
+"attribute can be annotated with <literal>@IndexedEmbedded</literal>. The "
+"attributes of the associated class will then be added to the main entity "
+"index. In the previous example, the index will contain the following fields"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:296
+#, no-c-format
+msgid "<para>id</para>"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:300
+#, no-c-format
+msgid "name"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:304
+#, no-c-format
+msgid "address.street"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:308
+#, no-c-format
+msgid "address.city"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:312
+#, no-c-format
+msgid "address.ownedBy_name"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:316
+#, no-c-format
+msgid ""
+"The default prefix is <literal>propertyName.</literal>, following the "
+"traditional object navigation convention. You can override it using the "
+"<literal>prefix</literal> attribute as it is shown on the <literal>ownedBy</"
+"literal> property."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:322
+#, no-c-format
+msgid "The prefix cannot be set to the empty string."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:325
+#, no-c-format
+msgid ""
+"The<literal> depth</literal> property is necessary when the object graph "
+"contains a cyclic dependency of classes (not instances). For example, if "
+"<classname>Owner</classname> points to <classname>Place</classname>. "
+"Hibernate Search will stop including Indexed embedded attributes after "
+"reaching the expected depth (or the object graph boundaries are reached). A "
+"class having a self reference is an example of cyclic dependency. In our "
+"example, because <literal>depth</literal> is set to 1, any "
+"<literal>@IndexedEmbedded</literal> attribute in Owner (if any) will be "
+"ignored."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:336
+#, no-c-format
+msgid ""
+"Using <literal>@IndexedEmbedded</literal> for object associations allows you "
+"to express queries such as:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:341
+#, no-c-format
+msgid ""
+"Return places where name contains JBoss and where address city is Atlanta. "
+"In Lucene query this would be"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:344
+#, no-c-format
+msgid "+name:jboss +address.city:atlanta"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:348
+#, no-c-format
+msgid ""
+"Return places where name contains JBoss and where owner's name contain Joe. "
+"In Lucene query this would be"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:351
+#, no-c-format
+msgid "+name:jboss +address.orderBy_name:joe"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:355
+#, no-c-format
+msgid ""
+"In a way it mimics the relational join operation in a more efficient way (at "
+"the cost of data duplication). Remember that, out of the box, Lucene indexes "
+"have no notion of association, the join operation is simply non-existent. It "
+"might help to keep the relational model normalized while benefiting from the "
+"full text index speed and feature richness."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:363
+#, no-c-format
+msgid ""
+"An associated object can itself (but does not have to) be <literal>@Indexed</"
+"literal>"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:367
+#, no-c-format
+msgid ""
+"When @IndexedEmbedded points to an entity, the association has to be "
+"directional and the other side has to be annotated <literal>@ContainedIn</"
+"literal> (as seen in the previous example). If not, Hibernate Search has no "
+"way to update the root index when the associated entity is updated (in our "
+"example, a <literal>Place</literal> index document has to be updated when "
+"the associated <classname>Address</classname> instance is updated)."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:375
+#, no-c-format
+msgid ""
+"Sometimes, the object type annotated by <classname>@IndexedEmbedded</"
+"classname> is not the object type targeted by Hibernate and Hibernate "
+"Search. This is especially the case when interfaces are used in lieu of "
+"their implementation. For this reason you can override the object type "
+"targeted by Hibernate Search using the <methodname>targetElement</"
+"methodname> parameter."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:383
+#, no-c-format
+msgid ""
+"Using the <literal>targetElement</literal> property of "
+"<classname>@IndexedEmbedded</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:386
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"public class Address {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Long id;\n"
+"\n"
+" @Field(index= Index.TOKENIZED)\n"
+" private String street;\n"
+"\n"
+" @IndexedEmbedded(depth = 1, prefix = \"ownedBy_\", <emphasis role=\"bold"
+"\">targetElement = Owner.class</emphasis>)\n"
+" @Target(Owner.class)\n"
+" private Person ownedBy;\n"
+"\n"
+"\n"
+" ...\n"
+"}\n"
+"\n"
+"@Embeddable\n"
+"public class Owner implements Person { ... }"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:391
+#, no-c-format
+msgid "Boost factor"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:393
+#, no-c-format
+msgid ""
+"Lucene has the notion of <emphasis>boost factor</emphasis>. It's a way to "
+"give more weight to a field or to an indexed element over others during the "
+"indexation process. You can use <literal>@Boost</literal> at the @Field, "
+"method or class level."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:399
+#, no-c-format
+msgid ""
+"Using different ways of increasing the weight of an indexed element using a "
+"boost factor"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:402
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed(index=\"indexes/essays\")\n"
+"<emphasis role=\"bold\">@Boost(1.7f)</emphasis>\n"
+"public class Essay {\n"
+" ...\n"
+"\n"
+" @Id\n"
+" @DocumentId\n"
+" public Long getId() { return id; }\n"
+"\n"
+" @Field(name=\"Abstract\", index=Index.TOKENIZED, store=Store.YES, "
+"boost=<emphasis\n"
+" role=\"bold\">@Boost(2f)</emphasis>)\n"
+" <emphasis role=\"bold\">@Boost(1.5f)</emphasis>\n"
+" public String getSummary() { return summary; }\n"
+"\n"
+" @Lob\n"
+" @Field(index=Index.TOKENIZED, boost=<emphasis role=\"bold\">@Boost(1.2f)"
+"</emphasis>)\n"
+" public String getText() { return text; }\n"
+"\n"
+" @Field\n"
+" public String getISBN() { return isbn; }\n"
+"\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:405
+#, no-c-format
+msgid ""
+"In our example, <classname>Essay</classname>'s probability to reach the top "
+"of the search list will be multiplied by 1.7. The <methodname>summary</"
+"methodname> field will be 3.0 (2 * 1.5 - <methodname>@Field.boost</"
+"methodname> and <classname>@Boost</classname> on a property are cumulative) "
+"more important than the <methodname>isbn</methodname> field. The "
+"<methodname>text</methodname> field will be 1.2 times more important than "
+"the <methodname>isbn</methodname> field. Note that this explanation in "
+"strictest terms is actually wrong, but it is simple and close enough to "
+"reality for all practical purposes. Please check the Lucene documentation or "
+"the excellent <citetitle>Lucene In Action </citetitle> from Otis Gospodnetic "
+"and Erik Hatcher."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:420
+#, no-c-format
+msgid "Analyzer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:422
+#, no-c-format
+msgid ""
+"The default analyzer class used to index tokenized fields is configurable "
+"through the <literal>hibernate.search.analyzer</literal> property. The "
+"default value for this property is <classname>org.apache.lucene.analysis."
+"standard.StandardAnalyzer</classname>."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:427
+#, no-c-format
+msgid ""
+"You can also define the analyzer class per entity, property and even per "
+"@Field (useful when multiple fields are indexed from a single property)."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:432
+#, no-c-format
+msgid "Different ways of specifying an analyzer"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:434
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"<emphasis role=\"bold\">@Analyzer(impl = EntityAnalyzer.class)</emphasis>\n"
+"public class MyEntity {\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Integer id;\n"
+"\n"
+" @Field(index = Index.TOKENIZED)\n"
+" private String name;\n"
+"\n"
+" @Field(index = Index.TOKENIZED)\n"
+" <emphasis role=\"bold\">@Analyzer(impl = PropertyAnalyzer.class)</"
+"emphasis>\n"
+" private String summary;\n"
+"\n"
+" @Field(index = Index.TOKENIZED, <emphasis><emphasis role=\"bold"
+"\">analyzer = @Analyzer(impl = FieldAnalyzer.class</emphasis>)</emphasis>\n"
+" private String body;\n"
+"\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:437
+#, no-c-format
+msgid ""
+"In this example, <classname>EntityAnalyzer</classname> is used to index all "
+"tokenized properties (eg. <literal>name</literal>), except <literal>summary</"
+"literal> and <literal>body</literal> which are indexed with "
+"<classname>PropertyAnalyzer</classname> and <classname>FieldAnalyzer</"
+"classname> respectively."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:444
+#, no-c-format
+msgid ""
+"Mixing different analyzers in the same entity is most of the time a bad "
+"practice. It makes query building more complex and results less predictable "
+"(for the novice), especially if you are using a QueryParser (which uses the "
+"same analyzer for the whole query). As a rule of thumb, for any given field "
+"the same analyzer should be used for indexing and querying."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:453
+#, no-c-format
+msgid "Analyzer definitions"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:455
+#, no-c-format
+msgid ""
+"Analyzers can become quite complex to deal with for which reason Hibernate "
+"Search introduces the notion of analyzer definitions. An analyzer definition "
+"can be reused by many <classname>@Analyzer</classname> declarations. An "
+"analyzer definition is composed of:"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:463
+#, no-c-format
+msgid "a name: the unique string used to refer to the definition"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:468
+#, no-c-format
+msgid ""
+"a tokenizer: responsible for tokenizing the input stream into individual "
+"words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:473
+#, no-c-format
+msgid ""
+"a list of filters: each filter is responsible to remove, modify or sometimes "
+"even add words into the stream provided by the tokenizer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:479
+#, no-c-format
+msgid ""
+"This separation of tasks - a tokenizer followed by a list of filters - "
+"allows for easy reuse of each individual component and let you build your "
+"customized analyzer in a very flexible way (just like Lego). Generally "
+"speaking the <classname>Tokenizer</classname> starts the analysis process by "
+"turning the character input into tokens which are then further processed by "
+"the <classname>TokenFilter</classname>s. Hibernate Search supports this "
+"infrastructure by utilizing the Solr analyzer framework. Make sure to "
+"add<filename> solr-core.jar and </filename><filename>solr-common.jar</"
+"filename> to your classpath to use analyzer definitions. In case you also "
+"want to utilizing a snowball stemmer also include the <filename>lucene-"
+"snowball.jar.</filename> Other Solr analyzers might depend on more "
+"libraries. For example, the <classname>PhoneticFilterFactory</classname> "
+"depends on <ulink url=\"http://commons.apache.org/codec\">commons-codec</"
+"ulink>. Your distribution of Hibernate Search provides these dependencies in "
+"its <filename>lib</filename> directory."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:498
+#, no-c-format
+msgid "<classname>@AnalyzerDef</classname> and the Solr framework"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:501
+#, no-c-format
+msgid ""
+"@AnalyzerDef(name=\"customanalyzer\",\n"
+" tokenizer = @TokenizerDef(factory = StandardTokenizerFactory."
+"class),\n"
+" filters = {\n"
+" @TokenFilterDef(factory = ISOLatin1AccentFilterFactory."
+"class),\n"
+" @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+" @TokenFilterDef(factory = StopFilterFactory.class, params = "
+"{\n"
+" @Parameter(name=\"words\", value= \"org/hibernate/search/"
+"test/analyzer/solr/stoplist.properties\" ),\n"
+" @Parameter(name=\"ignoreCase\", value=\"true\")\n"
+" })\n"
+"})\n"
+"public class Team {\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:504
+#, no-c-format
+msgid ""
+"A tokenizer is defined by its factory which is responsible for building the "
+"tokenizer and using the optional list of parameters. This example use the "
+"standard tokenizer. A filter is defined by its factory which is responsible "
+"for creating the filter instance using the optional parameters. In our "
+"example, the StopFilter filter is built reading the dedicated words property "
+"file and is expected to ignore case. The list of parameters is dependent on "
+"the tokenizer or filter factory."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:514
+#, no-c-format
+msgid ""
+"Filters are applied in the order they are defined in the "
+"<classname>@AnalyzerDef</classname> annotation. Make sure to think twice "
+"about this order."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:519
+#, no-c-format
+msgid ""
+"Once defined, an analyzer definition can be reused by an "
+"<classname>@Analyzer</classname> declaration using the definition name "
+"rather than declaring an implementation class."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:524
+#, no-c-format
+msgid "Referencing an analyzer by name"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:526
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"@AnalyzerDef(name=\"customanalyzer\", ... )\n"
+"public class Team {\n"
+" @Id\n"
+" @DocumentId\n"
+" @GeneratedValue\n"
+" private Integer id;\n"
+"\n"
+" @Field\n"
+" private String name;\n"
+"\n"
+" @Field\n"
+" private String location;\n"
+"\n"
+" @Field <emphasis role=\"bold\">@Analyzer(definition = \"customanalyzer\")"
+"</emphasis>\n"
+" private String description;\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:529
+#, no-c-format
+msgid ""
+"Analyzer instances declared by <classname>@AnalyzerDef</classname> are "
+"available by their name in the <classname>SearchFactory</classname>."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:533
+#, no-c-format
+msgid ""
+"Analyzer analyzer = fullTextSession.getSearchFactory().getAnalyzer"
+"(\"customanalyzer\");"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:535
+#, no-c-format
+msgid ""
+"This is quite useful wen building queries. Fields in queries should be "
+"analyzed with the same analyzer used to index the field so that they speak a "
+"common \"language\": the same tokens are reused between the query and the "
+"indexing process. This rule has some exceptions but is true most of the "
+"time. Respect it unless you know what you are doing."
+msgstr ""
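For instance, the analyzer retrieved by name can be handed to a Lucene QueryParser so that query terms are tokenized exactly like the indexed text. The following sketch assumes the "customanalyzer" definition and the Team entity shown above; the field name and query string are illustrative only:

    FullTextSession fullTextSession = Search.getFullTextSession( session );
    org.apache.lucene.analysis.Analyzer analyzer =
            fullTextSession.getSearchFactory().getAnalyzer( "customanalyzer" );

    // reuse the very same analyzer at query time
    org.apache.lucene.queryParser.QueryParser parser =
            new org.apache.lucene.queryParser.QueryParser( "description", analyzer );
    org.apache.lucene.search.Query luceneQuery = parser.parse( "description:hibernate" );

    org.hibernate.Query fullTextQuery =
            fullTextSession.createFullTextQuery( luceneQuery, Team.class );
    List results = fullTextQuery.list();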
+
+#. Tag: title
+#: mapping.xml:544
+#, no-c-format
+msgid "Available analyzers"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:546
+#, no-c-format
+msgid ""
+"Solr and Lucene come with a lot of useful default tokenizers and filters. "
+"You can find a complete list of tokenizer factories and filter factories at "
+"<ulink url=\"http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters"
+"\">http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters</ulink>. Let "
+"check a few of them."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:553
+#, no-c-format
+msgid "Some of the available tokenizers"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:558 mapping.xml:593
+#, no-c-format
+msgid "Factory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:560 mapping.xml:595
+#, no-c-format
+msgid "Description"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:562 mapping.xml:597
+#, no-c-format
+msgid "parameters"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:568
+#, no-c-format
+msgid "StandardTokenizerFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:570
+#, no-c-format
+msgid "Use the Lucene StandardTokenizer"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:572 mapping.xml:581 mapping.xml:607 mapping.xml:615
+#: mapping.xml:647
+#, no-c-format
+msgid "none"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:576
+#, no-c-format
+msgid "HTMLStripStandardTokenizerFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:578
+#, no-c-format
+msgid "Remove HTML tags, keep the text and pass it to a StandardTokenizer"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:588
+#, no-c-format
+msgid "Some of the available filters"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:603
+#, no-c-format
+msgid "StandardFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:605
+#, no-c-format
+msgid "Remove dots from acronyms and 's from words"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:611
+#, no-c-format
+msgid "LowerCaseFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:613
+#, no-c-format
+msgid "Lowercase words"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:619
+#, no-c-format
+msgid "StopFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:621
+#, no-c-format
+msgid "remove words (tokens) matching a list of stop words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:624
+#, no-c-format
+msgid ""
+"<literal>words</literal>: points to a resource file containing the stop words"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:625
+#, no-c-format
+msgid ""
+"ignoreCase: true if <literal>case</literal> should be ignore when comparing "
+"stop words, <literal>false</literal> otherwise"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:631
+#, no-c-format
+msgid "SnowballPorterFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:633
+#, no-c-format
+msgid ""
+"Reduces a word to it's root in a given language. (eg. protect, protects, "
+"protection share the same root). Using such a filter allows searches "
+"matching related words."
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:637
+#, no-c-format
+msgid ""
+"<literal>language</literal>: Danish, Dutch, English, Finnish, French, "
+"German, Italian, Norwegian, Portuguese, Russian, Spanish, Swedish and a few "
+"more"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:643
+#, no-c-format
+msgid "ISOLatin1AccentFilterFactory"
+msgstr ""
+
+#. Tag: entry
+#: mapping.xml:645
+#, no-c-format
+msgid "remove accents for languages like French"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:653
+#, no-c-format
+msgid ""
+"We recommend to check all the implementations of <classname>org.apache.solr."
+"analysis.TokenizerFactory</classname> and <classname>org.apache.solr."
+"analysis.TokenFilterFactory</classname> in your IDE to see the "
+"implementations available."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:660
+#, no-c-format
+msgid "Analyzer discriminator (experimental)"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:662
+#, no-c-format
+msgid ""
+"So far all the introduced ways to specify an analyzer were static. However, "
+"there are use cases where it is useful to select an analyzer depending on "
+"the current state of the entity to be indexed, for example in multilingual "
+"application. For an <classname>BlogEntry</classname> class for example the "
+"analyzer could depend on the language property of the entry. Depending on "
+"this property the correct language specific stemmer should be chosen to "
+"index the actual text."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:671
+#, no-c-format
+msgid ""
+"To enable this dynamic analyzer selection Hibernate Search introduces the "
+"<classname>AnalyzerDiscriminator</classname> annotation. The following "
+"example demonstrates the usage of this annotation:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:677
+#, no-c-format
+msgid ""
+"Usage of @AnalyzerDiscriminator in order to select an analyzer depending on "
+"the entity state"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:680
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"@AnalyzerDefs({\n"
+" @AnalyzerDef(name = \"en\",\n"
+" tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+" filters = {\n"
+" @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+" @TokenFilterDef(factory = EnglishPorterFilterFactory.class\n"
+" )\n"
+" }),\n"
+" @AnalyzerDef(name = \"de\",\n"
+" tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),\n"
+" filters = {\n"
+" @TokenFilterDef(factory = LowerCaseFilterFactory.class),\n"
+" @TokenFilterDef(factory = GermanStemFilterFactory.class)\n"
+" })\n"
+"})\n"
+"public class BlogEntry {\n"
+"\n"
+" @Id\n"
+" @GeneratedValue\n"
+" @DocumentId\n"
+" private Integer id;\n"
+"\n"
+" @Field\n"
+" @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)\n"
+" private String language;\n"
+" \n"
+" @Field\n"
+" private String text;\n"
+" \n"
+" private Set<BlogEntry> references;\n"
+"\n"
+" // standard getter/setter\n"
+" ...\n"
+"}"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:682
+#, no-c-format
+msgid ""
+"public class LanguageDiscriminator implements Discriminator {\n"
+"\n"
+" public String getAnalyzerDefinitionName(Object value, Object entity, "
+"String field) {\n"
+" if ( value == null || !( entity instanceof Article ) ) {\n"
+" return null;\n"
+" }\n"
+" return (String) value;\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:685
+#, no-c-format
+msgid ""
+"The prerequisite for using <classname>@AnalyzerDiscriminator</classname> is "
+"that all analyzers which are going to be used are predefined via "
+"<classname>@AnalyzerDef</classname> definitions. If this is the case one can "
+"place the <classname>@AnalyzerDiscriminator</classname> annotation either on "
+"the class or on a specific property of the entity for which to dynamically "
+"select an analyzer. Via the <literal>impl</literal> parameter of the "
+"<classname>AnalyzerDiscriminator</classname> you specify a concrete "
+"implementation of the <classname>Discriminator</classname> interface. It is "
+"up to you to provide an implementation for this interface. The only method "
+"you have to implement is <classname>getAnalyzerDefinitionName()</classname> "
+"which gets called for each field added to the Lucene document. The entity "
+"which is getting indexed is also passed to the interface method. The "
+"<literal>value</literal> parameter is only set if the "
+"<classname>AnalyzerDiscriminator</classname> is placed on property level "
+"instead of class level. In this case the value represents the current value "
+"of this property."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:705
+#, no-c-format
+msgid ""
+"An implemention of the <classname>Discriminator</classname> interface has to "
+"return the name of an existing analyzer definition if the analyzer should be "
+"set dynamically or <classname>null</classname> if the default analyzer "
+"should not be overridden. The given example assumes that the language "
+"parameter is either 'de' or 'en' which matches the specified names in the "
+"<classname>@AnalyzerDef</classname>s."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:714
+#, no-c-format
+msgid ""
+"The <classname>@AnalyzerDiscriminator</classname> is currently still "
+"experimental and the API might still change. We are hoping for some feedback "
+"from the community about the usefulness and usability of this feature."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:722
+#, no-c-format
+msgid "Retrieving an analyzer"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:724
+#, no-c-format
+msgid ""
+"During indexing time, Hibernate Search is using analyzers under the hood for "
+"you. In some situations, retrieving analyzers can be handy. If your domain "
+"model makes use of multiple analyzers (maybe to benefit from stemming, use "
+"phonetic approximation and so on), you need to make sure to use the same "
+"analyzers when you build your query."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:732
+#, no-c-format
+msgid ""
+"This rule can be broken but you need a good reason for it. If you are "
+"unsure, use the same analyzers."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:736
+#, no-c-format
+msgid ""
+"You can retrieve the scoped analyzer for a given entity used at indexing "
+"time by Hibernate Search. A scoped analyzer is an analyzer which applies the "
+"right analyzers depending on the field indexed: multiple analyzers can be "
+"defined on a given entity each one working on an individual field, a scoped "
+"analyzer unify all these analyzers into a context-aware analyzer. While the "
+"theory seems a bit complex, using the right analyzer in a query is very easy."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:745
+#, no-c-format
+msgid "Using the scoped analyzer when building a full-text query"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:748
+#, no-c-format
+msgid ""
+"org.apache.lucene.queryParser.QueryParser parser = new QueryParser(\n"
+" \"title\", \n"
+" fullTextSession.getSearchFactory().getAnalyzer( Song.class )\n"
+");\n"
+"\n"
+"org.apache.lucene.search.Query luceneQuery = \n"
+" parser.parse( \"title:sky Or title_stemmed:diamond\" );\n"
+"\n"
+"org.hibernate.Query fullTextQuery = \n"
+" fullTextSession.createFullTextQuery( luceneQuery, Song.class );\n"
+"\n"
+"List result = fullTextQuery.list(); //return a list of managed objects"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:751
+#, no-c-format
+msgid ""
+"In the example above, the song title is indexed in two fields: the standard "
+"analyzer is used in the field <literal>title</literal> and a stemming "
+"analyzer is used in the field <literal>title_stemmed</literal>. By using the "
+"analyzer provided by the search factory, the query uses the appropriate "
+"analyzer depending on the field targeted."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:758
+#, no-c-format
+msgid ""
+"If your query targets more that one query and you wish to use your standard "
+"analyzer, make sure to describe it using an analyzer definition. You can "
+"retrieve analyzers by their definition name using <code>searchFactory."
+"getAnalyzer(String)</code>."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:767
+#, no-c-format
+msgid "Property/Field Bridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:769
+#, no-c-format
+msgid ""
+"In Lucene all index fields have to be represented as Strings. For this "
+"reason all entity properties annotated with <literal>@Field</literal> have "
+"to be indexed in a String form. For most of your properties, Hibernate "
+"Search does the translation job for you thanks to a built-in set of bridges. "
+"In some cases, though you need a more fine grain control over the "
+"translation process."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:777
+#, no-c-format
+msgid "Built-in bridges"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:779
+#, no-c-format
+msgid ""
+"Hibernate Search comes bundled with a set of built-in bridges between a Java "
+"property type and its full text representation."
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:784
+#, no-c-format
+msgid "null"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:787
+#, no-c-format
+msgid ""
+"null elements are not indexed. Lucene does not support null elements and "
+"this does not make much sense either."
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:793
+#, no-c-format
+msgid "java.lang.String"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:796
+#, no-c-format
+msgid "String are indexed as is"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:801
+#, no-c-format
+msgid ""
+"short, Short, integer, Integer, long, Long, float, Float, double, Double, "
+"BigInteger, BigDecimal"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:805
+#, no-c-format
+msgid ""
+"Numbers are converted in their String representation. Note that numbers "
+"cannot be compared by Lucene (ie used in ranged queries) out of the box: "
+"they have to be padded"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:808
+#, no-c-format
+msgid ""
+"Using a Range query is debatable and has drawbacks, an alternative approach "
+"is to use a Filter query which will filter the result query to the "
+"appropriate range."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:812
+#, no-c-format
+msgid "Hibernate Search will support a padding mechanism"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:818
+#, no-c-format
+msgid "java.util.Date"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:821
+#, no-c-format
+msgid ""
+"Dates are stored as yyyyMMddHHmmssSSS in GMT time (200611072203012 for Nov "
+"7th of 2006 4:03PM and 12ms EST). You shouldn't really bother with the "
+"internal format. What is important is that when using a DateRange Query, you "
+"should know that the dates have to be expressed in GMT time."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:827
+#, no-c-format
+msgid ""
+"Usually, storing the date up to the millisecond is not necessary. "
+"<literal>@DateBridge</literal> defines the appropriate resolution you are "
+"willing to store in the index ( <literal> <literal>@DateBridge"
+"(resolution=Resolution.DAY)</literal> </literal> ). The date pattern will "
+"then be truncated accordingly."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:834
+#, no-c-format
+msgid ""
+"@Entity \n"
+"@Indexed\n"
+"public class Meeting {\n"
+" @Field(index=Index.UN_TOKENIZED)\n"
+" <emphasis role=\"bold\">@DateBridge(resolution=Resolution.MINUTE)</"
+"emphasis>\n"
+" private Date date;\n"
+" ..."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:837
+#, no-c-format
+msgid ""
+"A Date whose resolution is lower than <literal>MILLISECOND</literal> cannot "
+"be a <literal>@DocumentId</literal>"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:845
+#, no-c-format
+msgid "java.net.URI, java.net.URL"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:848
+#, no-c-format
+msgid "URI and URL are converted to their string representation"
+msgstr ""
+
+#. Tag: term
+#: mapping.xml:854
+#, no-c-format
+msgid "java.lang.Class"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:857
+#, no-c-format
+msgid ""
+"Class are converted to their fully qualified class name. The thread context "
+"classloader is used when the class is rehydrated"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:866
+#, no-c-format
+msgid "Custom Bridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:868
+#, no-c-format
+msgid ""
+"Sometimes, the built-in bridges of Hibernate Search do not cover some of "
+"your property types, or the String representation used by the bridge does "
+"not meet your requirements. The following paragraphs describe several "
+"solutions to this problem."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:874
+#, no-c-format
+msgid "StringBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:876
+#, no-c-format
+msgid ""
+"The simplest custom solution is to give Hibernate Search an implementation "
+"of your expected <emphasis><classname>Object</classname> </emphasis>to "
+"<classname>String</classname> bridge. To do so you need to implements the "
+"<literal>org.hibernate.search.bridge.StringBridge</literal> interface. All "
+"implementations have to be thread-safe as they are used concurrently."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:885
+#, no-c-format
+msgid "Implementing your own <classname>StringBridge</classname>"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:888
+#, no-c-format
+msgid ""
+"/**\n"
+" * Padding Integer bridge.\n"
+" * All numbers will be padded with 0 to match 5 digits\n"
+" *\n"
+" * @author Emmanuel Bernard\n"
+" */\n"
+"public class PaddedIntegerBridge implements <emphasis role=\"bold"
+"\">StringBridge</emphasis> {\n"
+"\n"
+" private int PADDING = 5;\n"
+"\n"
+" <emphasis role=\"bold\">public String objectToString(Object object)</"
+"emphasis> {\n"
+" String rawInteger = ( (Integer) object ).toString();\n"
+" if (rawInteger.length() > PADDING) \n"
+" throw new IllegalArgumentException( \"Try to pad on a number too "
+"big\" );\n"
+" StringBuilder paddedInteger = new StringBuilder( );\n"
+" for ( int padIndex = rawInteger.length() ; padIndex < PADDING ; "
+"padIndex++ ) {\n"
+" paddedInteger.append('0');\n"
+" }\n"
+" return paddedInteger.append( rawInteger ).toString();\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:891
+#, no-c-format
+msgid ""
+"Then any property or field can use this bridge thanks to the "
+"<literal>@FieldBridge</literal> annotation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:894
+#, no-c-format
+msgid ""
+"<emphasis role=\"bold\">@FieldBridge(impl = PaddedIntegerBridge.class)</"
+"emphasis>\n"
+"private Integer length;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:896
+#, no-c-format
+msgid ""
+"Parameters can be passed to the Bridge implementation making it more "
+"flexible. The Bridge implementation implements a "
+"<classname>ParameterizedBridge</classname> interface, and the parameters are "
+"passed through the <literal>@FieldBridge</literal> annotation."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:903
+#, no-c-format
+msgid "Passing parameters to your bridge implementation"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:905
+#, no-c-format
+msgid ""
+"public class PaddedIntegerBridge implements StringBridge, <emphasis\n"
+" role=\"bold\">ParameterizedBridge</emphasis> {\n"
+"\n"
+" public static String PADDING_PROPERTY = \"padding\";\n"
+" private int padding = 5; //default\n"
+"\n"
+" <emphasis role=\"bold\">public void setParameterValues(Map parameters)</"
+"emphasis> {\n"
+" Object padding = parameters.get( PADDING_PROPERTY );\n"
+" if (padding != null) this.padding = (Integer) padding;\n"
+" }\n"
+"\n"
+" public String objectToString(Object object) {\n"
+" String rawInteger = ( (Integer) object ).toString();\n"
+" if (rawInteger.length() > padding) \n"
+" throw new IllegalArgumentException( \"Try to pad on a number too "
+"big\" );\n"
+" StringBuilder paddedInteger = new StringBuilder( );\n"
+" for ( int padIndex = rawInteger.length() ; padIndex < padding ; "
+"padIndex++ ) {\n"
+" paddedInteger.append('0');\n"
+" }\n"
+" return paddedInteger.append( rawInteger ).toString();\n"
+" }\n"
+"}\n"
+"\n"
+"\n"
+"//property\n"
+"@FieldBridge(impl = PaddedIntegerBridge.class,\n"
+" <emphasis role=\"bold\">params = @Parameter(name=\"padding\", "
+"value=\"10\")</emphasis>\n"
+" )\n"
+"private Integer length;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:908
+#, no-c-format
+msgid ""
+"The <classname>ParameterizedBridge</classname> interface can be implemented "
+"by <classname>StringBridge</classname>, <classname>TwoWayStringBridge</"
+"classname>, <classname>FieldBridge</classname> implementations."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:913
+#, no-c-format
+msgid ""
+"All implementations have to be thread-safe, but the parameters are set "
+"during initialization and no special care is required at this stage."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:917
+#, no-c-format
+msgid ""
+"If you expect to use your bridge implementation on an id property (ie "
+"annotated with <literal>@DocumentId</literal> ), you need to use a slightly "
+"extended version of <literal>StringBridge</literal> named "
+"<classname>TwoWayStringBridge</classname>. Hibernate Search needs to read "
+"the string representation of the identifier and generate the object out of "
+"it. There is not difference in the way the <literal>@FieldBridge</literal> "
+"annotation is used."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:926
+#, no-c-format
+msgid ""
+"Implementing a TwoWayStringBridge which can for example be used for id "
+"properties"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:929
+#, no-c-format
+msgid ""
+"public class PaddedIntegerBridge implements TwoWayStringBridge, "
+"ParameterizedBridge {\n"
+"\n"
+" public static String PADDING_PROPERTY = \"padding\";\n"
+" private int padding = 5; //default\n"
+"\n"
+" public void setParameterValues(Map parameters) {\n"
+" Object padding = parameters.get( PADDING_PROPERTY );\n"
+" if (padding != null) this.padding = (Integer) padding;\n"
+" }\n"
+"\n"
+" public String objectToString(Object object) {\n"
+" String rawInteger = ( (Integer) object ).toString();\n"
+" if (rawInteger.length() > padding) \n"
+" throw new IllegalArgumentException( \"Try to pad on a number too "
+"big\" );\n"
+" StringBuilder paddedInteger = new StringBuilder( );\n"
+" for ( int padIndex = rawInteger.length() ; padIndex < padding ; "
+"padIndex++ ) {\n"
+" paddedInteger.append('0');\n"
+" }\n"
+" return paddedInteger.append( rawInteger ).toString();\n"
+" }\n"
+"\n"
+" <emphasis role=\"bold\">public Object stringToObject(String stringValue)"
+"</emphasis> {\n"
+" return new Integer(stringValue);\n"
+" }\n"
+"}\n"
+"\n"
+"\n"
+"//id property\n"
+"@DocumentId\n"
+"@FieldBridge(impl = PaddedIntegerBridge.class,\n"
+" params = @Parameter(name=\"padding\", value=\"10\") \n"
+"private Integer id;"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:932
+#, no-c-format
+msgid ""
+"It is critically important for the two-way process to be idempotent (ie "
+"object = stringToObject( objectToString( object ) ) )."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:938
+#, no-c-format
+msgid "FieldBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:940
+#, no-c-format
+msgid ""
+"Some use cases require more than a simple object to string translation when "
+"mapping a property to a Lucene index. To give you the greatest possible "
+"flexibility you can also implement a bridge as a <classname>FieldBridge</"
+"classname>. This interface gives you a property value and let you map it the "
+"way you want in your Lucene <classname>Document</classname>.The interface is "
+"very similar in its concept to the Hibernate<classname> UserType</"
+"classname>s."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:948
+#, no-c-format
+msgid ""
+"You can for example store a given property in two different document fields:"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:952
+#, no-c-format
+msgid ""
+"Implementing the FieldBridge interface in order to a given property into "
+"multiple document fields"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:955
+#, no-c-format
+msgid ""
+"/**\n"
+" * Store the date in 3 different fields - year, month, day - to ease Range "
+"Query per\n"
+" * year, month or day (eg get all the elements of December for the last 5 "
+"years).\n"
+" * \n"
+" * @author Emmanuel Bernard\n"
+" */\n"
+"public class DateSplitBridge implements FieldBridge {\n"
+" private final static TimeZone GMT = TimeZone.getTimeZone(\"GMT\");\n"
+"\n"
+" <emphasis role=\"bold\">public void set(String name, Object value, "
+"Document document, \n"
+" LuceneOptions luceneOptions)</emphasis> {\n"
+" Date date = (Date) value;\n"
+" Calendar cal = GregorianCalendar.getInstance(GMT);\n"
+" cal.setTime(date);\n"
+" int year = cal.get(Calendar.YEAR);\n"
+" int month = cal.get(Calendar.MONTH) + 1;\n"
+" int day = cal.get(Calendar.DAY_OF_MONTH);\n"
+" \n"
+" // set year\n"
+" Field field = new Field(name + \".year\", String.valueOf(year),\n"
+" luceneOptions.getStore(), luceneOptions.getIndex(),\n"
+" luceneOptions.getTermVector());\n"
+" field.setBoost(luceneOptions.getBoost());\n"
+" document.add(field);\n"
+" \n"
+" // set month and pad it if needed\n"
+" field = new Field(name + \".month\", month < 10 ? \"0\" : \"\"\n"
+" + String.valueOf(month), luceneOptions.getStore(),\n"
+" luceneOptions.getIndex(), luceneOptions.getTermVector());\n"
+" field.setBoost(luceneOptions.getBoost());\n"
+" document.add(field);\n"
+" \n"
+" // set day and pad it if needed\n"
+" field = new Field(name + \".day\", day < 10 ? \"0\" : \"\"\n"
+" + String.valueOf(day), luceneOptions.getStore(),\n"
+" luceneOptions.getIndex(), luceneOptions.getTermVector());\n"
+" field.setBoost(luceneOptions.getBoost());\n"
+" document.add(field);\n"
+" }\n"
+"}\n"
+"\n"
+"//property\n"
+"<emphasis role=\"bold\">@FieldBridge(impl = DateSplitBridge.class)</"
+"emphasis>\n"
+"private Date date;"
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:960
+#, no-c-format
+msgid "ClassBridge"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:962
+#, no-c-format
+msgid ""
+"It is sometimes useful to combine more than one property of a given entity "
+"and index this combination in a specific way into the Lucene index. The "
+"<classname>@ClassBridge</classname> and <classname>@ClassBridge</classname> "
+"annotations can be defined at the class level (as opposed to the property "
+"level). In this case the custom field bridge implementation receives the "
+"entity instance as the value parameter instead of a particular property. "
+"Though not shown in this example, <classname>@ClassBridge</classname> "
+"supports the <methodname>termVector</methodname> attribute discussed in "
+"section <xref linkend=\"basic-mapping\"/>."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:974
+#, no-c-format
+msgid "Implementing a class bridge"
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:976
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"<emphasis role=\"bold\">@ClassBridge</emphasis>(name=\"branchnetwork\",\n"
+" index=Index.TOKENIZED,\n"
+" store=Store.YES,\n"
+" impl = <emphasis role=\"bold\">CatFieldsClassBridge.class</"
+"emphasis>,\n"
+" params = @Parameter( name=\"sepChar\", value=\" \" ) )\n"
+"public class Department {\n"
+" private int id;\n"
+" private String network;\n"
+" private String branchHead;\n"
+" private String branch;\n"
+" private Integer maxEmployees\n"
+" ...\n"
+"}\n"
+"\n"
+"\n"
+"public class CatFieldsClassBridge implements FieldBridge, "
+"ParameterizedBridge {\n"
+" private String sepChar;\n"
+"\n"
+" public void setParameterValues(Map parameters) {\n"
+" this.sepChar = (String) parameters.get( \"sepChar\" );\n"
+" }\n"
+"\n"
+" <emphasis role=\"bold\">public void set(String name, Object value, "
+"Document document, LuceneOptions luceneOptions)</emphasis> {\n"
+" // In this particular class the name of the new field was passed\n"
+" // from the name field of the ClassBridge Annotation. This is not\n"
+" // a requirement. It just works that way in this instance. The\n"
+" // actual name could be supplied by hard coding it below.\n"
+" Department dep = (Department) value;\n"
+" String fieldValue1 = dep.getBranch();\n"
+" if ( fieldValue1 == null ) {\n"
+" fieldValue1 = \"\";\n"
+" }\n"
+" String fieldValue2 = dep.getNetwork();\n"
+" if ( fieldValue2 == null ) {\n"
+" fieldValue2 = \"\";\n"
+" }\n"
+" String fieldValue = fieldValue1 + sepChar + fieldValue2;\n"
+" Field field = new Field( name, fieldValue, luceneOptions.getStore(), "
+"luceneOptions.getIndex(), luceneOptions.getTermVector() );\n"
+" field.setBoost( luceneOptions.getBoost() );\n"
+" document.add( field );\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:979
+#, no-c-format
+msgid ""
+"In this example, the particular <classname>CatFieldsClassBridge</classname> "
+"is applied to the <literal>department</literal> instance, the field bridge "
+"then concatenate both branch and network and index the concatenation."
+msgstr ""
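As a rough sketch only (not part of the original example), the combined field can then be targeted like any other field in a query; the branch value "layton" below is purely illustrative:

    FullTextSession fullTextSession = Search.getFullTextSession( session );
    org.apache.lucene.queryParser.QueryParser parser =
            new org.apache.lucene.queryParser.QueryParser( "branchnetwork",
                    new org.apache.lucene.analysis.standard.StandardAnalyzer() );
    org.apache.lucene.search.Query luceneQuery = parser.parse( "branchnetwork:layton" );
    org.hibernate.Query fullTextQuery =
            fullTextSession.createFullTextQuery( luceneQuery, Department.class );
    List departments = fullTextQuery.list();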
+
+#. Tag: title
+#: mapping.xml:989 mapping.xml:1014
+#, no-c-format
+msgid "Providing your own id"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:992
+#, no-c-format
+msgid "This part of the documentation is a work in progress."
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:995
+#, no-c-format
+msgid ""
+"You can provide your own id for Hibernate Search if you are extending the "
+"internals. You will have to generate a unique value so it can be given to "
+"Lucene to be indexed. This will have to be given to Hibernate Search when "
+"you create an org.hibernate.search.Work object - the document id is required "
+"in the constructor."
+msgstr ""
+
+#. Tag: title
+#: mapping.xml:1002
+#, no-c-format
+msgid "The ProvidedId annotation"
+msgstr ""
+
+#. Tag: para
+#: mapping.xml:1004
+#, no-c-format
+msgid ""
+"Unlike conventional Hibernate Search API and @DocumentId, this annotation is "
+"used on the class and not a field. You also can provide your own bridge "
+"implementation when you put in this annotation by calling the bridge() which "
+"is on @ProvidedId. Also, if you annotate a class with @ProvidedId, your "
+"subclasses will also get the annotation - but it is not done by using the "
+"java.lang.annotations.@Inherited. Be sure however, to <emphasis>not</"
+"emphasis> use this annotation with @DocumentId as your system will break."
+msgstr ""
+
+#. Tag: programlisting
+#: mapping.xml:1016
+#, no-c-format
+msgid ""
+"@ProvidedId (bridge = org.my.own.package.MyCustomBridge)\n"
+"@Indexed\n"
+"public class MyClass{\n"
+" @Field\n"
+" String MyString;\n"
+" ...\n"
+"}"
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/mapping.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/optimize.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/optimize.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/optimize.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,215 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: optimize.xml:30
+#, no-c-format
+msgid "Index Optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:32
+#, no-c-format
+msgid ""
+"From time to time, the Lucene index needs to be optimized. The process is "
+"essentially a defragmentation. Until an optimization is triggered Lucene "
+"only marks deleted documents as such, no physical deletions are applied. "
+"During the optimization process the deletions will be applied which also "
+"effects the number of files in the Lucene Directory."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:38
+#, no-c-format
+msgid ""
+"Optimizing the Lucene index speeds up searches but has no effect on the "
+"indexation (update) performance. During an optimization, searches can be "
+"performed, but will most likely be slowed down. All index updates will be "
+"stopped. It is recommended to schedule optimization:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:45
+#, no-c-format
+msgid "on an idle system or when the searches are less frequent"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:49
+#, no-c-format
+msgid "after a lot of index modifications"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:54
+#, no-c-format
+msgid "Automatic optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:56
+#, no-c-format
+msgid "Hibernate Search can automatically optimize an index after:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:60
+#, no-c-format
+msgid "a certain amount of operations (insertion, deletion)"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:64
+#, no-c-format
+msgid "or a certain amount of transactions"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:68
+#, no-c-format
+msgid ""
+"The configuration for automatic index optimization can be defined on a "
+"global level or per index:"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:72
+#, no-c-format
+msgid "Defining automatic optimization parameters"
+msgstr ""
+
+#. Tag: programlisting
+#: optimize.xml:74
+#, no-c-format
+msgid ""
+"hibernate.search.default.optimizer.operation_limit.max = 1000\n"
+"hibernate.search.default.optimizer.transaction_limit.max = 100\n"
+"hibernate.search.Animal.optimizer.transaction_limit.max = 50"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:77
+#, no-c-format
+msgid ""
+"An optimization will be triggered to the <literal>Animal</literal> index as "
+"soon as either:"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:82
+#, no-c-format
+msgid "the number of additions and deletions reaches 1000"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:86
+#, no-c-format
+msgid ""
+"the number of transactions reaches 50 (<constant>hibernate.search.Animal."
+"optimizer.transaction_limit.max</constant> having priority over "
+"<constant>hibernate.search.default.optimizer.transaction_limit.max</"
+"constant>)"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:93
+#, no-c-format
+msgid ""
+"If none of these parameters are defined, no optimization is processed "
+"automatically."
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:98
+#, no-c-format
+msgid "Manual optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:100
+#, no-c-format
+msgid ""
+"You can programmatically optimize (defragment) a Lucene index from Hibernate "
+"Search through the <classname>SearchFactory</classname>:"
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:104
+#, no-c-format
+msgid "Programmatic index optimization"
+msgstr ""
+
+#. Tag: programlisting
+#: optimize.xml:106
+#, no-c-format
+msgid ""
+"FullTextSession fullTextSession = Search.getFullTextSession"
+"(regularSession);\n"
+"SearchFactory searchFactory = fullTextSession.getSearchFactory();\n"
+"\n"
+"searchFactory.optimize(Order.class);\n"
+"// or\n"
+"searchFactory.optimize();"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:109
+#, no-c-format
+msgid ""
+"The first example optimizes the Lucene index holding <classname>Order</"
+"classname>s; the second, optimizes all indexes."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:113
+#, no-c-format
+msgid ""
+"<literal>searchFactory.optimize()</literal> has no effect on a JMS backend. "
+"You must apply the optimize operation on the Master node."
+msgstr ""
+
+#. Tag: title
+#: optimize.xml:120
+#, no-c-format
+msgid "Adjusting optimization"
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:122
+#, no-c-format
+msgid ""
+"Apache Lucene has a few parameters to influence how optimization is "
+"performed. Hibernate Search exposes those parameters."
+msgstr ""
+
+#. Tag: para
+#: optimize.xml:125
+#, no-c-format
+msgid ""
+"Further index optimization parameters include: <itemizedlist> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].max_buffered_docs</literal> </listitem> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].max_field_length</literal> </listitem> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].max_merge_docs</literal> </listitem> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].merge_factor</literal> </listitem> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].ram_buffer_size</literal> </listitem> <listitem> "
+"<literal>hibernate.search.[default|<indexname>].indexwriter.[batch|"
+"transaction].term_index_interval</literal> </listitem> </itemizedlist> See "
+"<xref linkend=\"lucene-indexing-performance\"/> for more details."
+msgstr ""
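As a configuration sketch only (the values are illustrative and the Animal index name is simply reused from the earlier example in this chapter), these parameters follow the same property scheme as the other settings shown above:

    hibernate.search.default.indexwriter.batch.max_merge_docs = 1000
    hibernate.search.default.indexwriter.batch.merge_factor = 20
    hibernate.search.default.indexwriter.transaction.ram_buffer_size = 32
    hibernate.search.Animal.indexwriter.batch.max_buffered_docs = 100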
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/optimize.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/query.po
===================================================================
--- search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/query.po (rev 0)
+++ search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/query.po 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1349 @@
+# Chinese translations for PACKAGE package.
+# Automatically generated, 2009.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: http://bugs.kde.org\n"
+"POT-Creation-Date: 2009-05-26 15:46+0000\n"
+"PO-Revision-Date: 2009-05-26 15:46+0000\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. Tag: title
+#: query.xml:30
+#, no-c-format
+msgid "Querying"
+msgstr ""
+
+#. Tag: para
+#: query.xml:32
+#, no-c-format
+msgid ""
+"The second most important capability of Hibernate Search is the ability to "
+"execute a Lucene query and retrieve entities managed by an Hibernate "
+"session, providing the power of Lucene without leaving the Hibernate "
+"paradigm, and giving another dimension to the Hibernate classic search "
+"mechanisms (HQL, Criteria query, native SQL query). Preparing and executing "
+"a query consists of four simple steps:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:41
+#, no-c-format
+msgid "Creating a <classname>FullTextSession</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:45
+#, no-c-format
+msgid "<para>Creating a Lucene query</para>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:49
+#, no-c-format
+msgid ""
+"Wrapping the Lucene query using a <classname>org.hibernate.Query</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:54
+#, no-c-format
+msgid ""
+"Executing the search by calling for example <methodname>list()</methodname> "
+"or <methodname>scroll()</methodname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:60
+#, no-c-format
+msgid ""
+"To access the querying facilities, you have to use an "
+"<classname>FullTextSession</classname>. This Search specific session wraps a "
+"regular <classname>org.hibernate.Session</classname> to provide query and "
+"indexing capabilities."
+msgstr ""
+
+#. Tag: title
+#: query.xml:66
+#, no-c-format
+msgid "Creating a FullTextSession"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:68
+#, no-c-format
+msgid ""
+"Session session = sessionFactory.openSession();\n"
+"...\n"
+"FullTextSession fullTextSession = Search.getFullTextSession(session);"
+msgstr ""
+
+#. Tag: para
+#: query.xml:71
+#, no-c-format
+msgid ""
+"The actual search facility is built on native Lucene queries which the "
+"following example illustrates."
+msgstr ""
+
+#. Tag: title
+#: query.xml:75
+#, no-c-format
+msgid "<title>Creating a Lucene query</title>"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:77
+#, no-c-format
+msgid ""
+"org.apache.lucene.queryParser.QueryParser parser = \n"
+" new QueryParser(\"title\", new StopAnalyzer() );\n"
+"\n"
+"org.apache.lucene.search.Query luceneQuery = parser.parse( \"summary:Festina "
+"Or brand:Seiko\" );\n"
+"<emphasis role=\"bold\">org.hibernate.Query fullTextQuery = fullTextSession."
+"createFullTextQuery( luceneQuery );\n"
+" </emphasis>\n"
+"List result = fullTextQuery.list(); //return a list of managed objects"
+msgstr ""
+
+#. Tag: para
+#: query.xml:80
+#, no-c-format
+msgid ""
+"The Hibernate query built on top of the Lucene query is a regular "
+"<literal>org.hibernate.Query</literal>, which means you are in the same "
+"paradigm as the other Hibernate query facilities (HQL, Native or Criteria). "
+"The regular <literal>list()</literal> , <literal>uniqueResult()</literal>, "
+"<literal>iterate()</literal> and <literal>scroll()</literal> methods can be "
+"used."
+msgstr ""
+
+#. Tag: para
+#: query.xml:87
+#, no-c-format
+msgid ""
+"In case you are using the Java Persistence APIs of Hibernate (aka EJB 3.0 "
+"Persistence), the same extensions exist:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:91
+#, no-c-format
+msgid "Creating a Search query using the JPA API"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:93
+#, no-c-format
+msgid ""
+"EntityManager em = entityManagerFactory.createEntityManager();\n"
+"\n"
+"FullTextEntityManager fullTextEntityManager = \n"
+" org.hibernate.hibernate.search.jpa.Search.getFullTextEntityManager(em);\n"
+"\n"
+"...\n"
+"org.apache.lucene.queryParser.QueryParser parser = \n"
+" new QueryParser(\"title\", new StopAnalyzer() );\n"
+"\n"
+"org.apache.lucene.search.Query luceneQuery = parser.parse( \"summary:Festina "
+"Or brand:Seiko\" );\n"
+"<emphasis role=\"bold\">javax.persistence.Query fullTextQuery = "
+"fullTextEntityManager.createFullTextQuery( luceneQuery );</emphasis>\n"
+"\n"
+"List result = fullTextQuery.getResultList(); //return a list of managed "
+"objects"
+msgstr ""
+
+#. Tag: para
+#: query.xml:96
+#, no-c-format
+msgid ""
+"The following examples we will use the Hibernate APIs but the same example "
+"can be easily rewritten with the Java Persistence API by just adjusting the "
+"way the <classname>FullTextQuery</classname> is retrieved."
+msgstr ""
+
+#. Tag: title
+#: query.xml:102
+#, no-c-format
+msgid "Building queries"
+msgstr ""
+
+#. Tag: para
+#: query.xml:104
+#, no-c-format
+msgid ""
+"Hibernate Search queries are built on top of Lucene queries which gives you "
+"total freedom on the type of Lucene query you want to execute. However, once "
+"built, Hibernate Search wraps further query processing using <classname>org."
+"hibernate.Query</classname> as your primary query manipulation API."
+msgstr ""
+
+#. Tag: title
+#: query.xml:111
+#, no-c-format
+msgid "Building a Lucene query"
+msgstr ""
+
+#. Tag: para
+#: query.xml:113
+#, no-c-format
+msgid ""
+"It is out of the scope of this documentation on how to exactly build a "
+"Lucene query. Please refer to the online Lucene documentation or get hold of "
+"a copy of either Lucene In Action or Hibernate Search in Action."
+msgstr ""
+
+#. Tag: title
+#: query.xml:120
+#, no-c-format
+msgid "Building a Hibernate Search query"
+msgstr ""
+
+#. Tag: title
+#: query.xml:123
+#, no-c-format
+msgid "Generality"
+msgstr ""
+
+#. Tag: para
+#: query.xml:125
+#, no-c-format
+msgid ""
+"Once the Lucene query is built, it needs to be wrapped into an Hibernate "
+"Query."
+msgstr ""
+
+#. Tag: title
+#: query.xml:129
+#, no-c-format
+msgid "Wrapping a Lucene query into a Hibernate Query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:131
+#, no-c-format
+msgid ""
+"FullTextSession fullTextSession = Search.getFullTextSession( session );\n"
+"org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery"
+"( luceneQuery );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:134
+#, no-c-format
+msgid ""
+"If not specified otherwise, the query will be executed against all indexed "
+"entities, potentially returning all types of indexed classes. It is advised, "
+"from a performance point of view, to restrict the returned types:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:140
+#, no-c-format
+msgid "Filtering the search result by entity type"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:142
+#, no-c-format
+msgid ""
+"org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery"
+"( luceneQuery, Customer.class );\n"
+"// or\n"
+"fullTextQuery = fullTextSession.createFullTextQuery( luceneQuery, Item."
+"class, Actor.class );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:145
+#, no-c-format
+msgid ""
+"The first example returns only matching <classname>Customer</classname>s, "
+"the second returns matching <classname>Actor</classname>s and "
+"<classname>Item</classname>s. The type restriction is fully polymorphic "
+"which means that if there are two indexed subclasses <classname>Salesman</"
+"classname> and <classname>Customer</classname> of the baseclass "
+"<classname>Person</classname>, it is possible to just specify "
+"<classname>Person.class</classname> in order to filter on result types."
+msgstr ""
+
+#. Tag: title
+#: query.xml:157
+#, no-c-format
+msgid "Pagination"
+msgstr ""
+
+#. Tag: para
+#: query.xml:159
+#, no-c-format
+msgid ""
+"Out of performance reasons it is recommended to restrict the number of "
+"returned objects per query. In fact is a very common use case anyway that "
+"the user navigates from one page to an other. The way to define pagination "
+"is exactly the way you would define pagination in a plain HQL or Criteria "
+"query."
+msgstr ""
+
+#. Tag: title
+#: query.xml:166
+#, no-c-format
+msgid "Defining pagination for a search query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:168
+#, no-c-format
+msgid ""
+"org.hibernate.Query fullTextQuery = fullTextSession.createFullTextQuery"
+"( luceneQuery, Customer.class );\n"
+"fullTextQuery.setFirstResult(15); //start from the 15th element\n"
+"fullTextQuery.setMaxResults(10); //return 10 elements"
+msgstr ""
+
+#. Tag: para
+#: query.xml:172
+#, no-c-format
+msgid ""
+"It is still possible to get the total number of matching elements regardless "
+"of the pagination via <methodname>fulltextQuery.</"
+"methodname><methodname>getResultSize()</methodname>"
+msgstr ""
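A brief sketch combining both, assuming the luceneQuery and Customer entity from the earlier examples (the offsets are illustrative):

    org.hibernate.search.FullTextQuery fullTextQuery =
            fullTextSession.createFullTextQuery( luceneQuery, Customer.class );
    fullTextQuery.setFirstResult(15); // start from the 15th element
    fullTextQuery.setMaxResults(10);  // return 10 elements
    List page = fullTextQuery.list();
    int totalHits = fullTextQuery.getResultSize(); // total matches, ignoring pagination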
+
+#. Tag: title
+#: query.xml:179
+#, no-c-format
+msgid "Sorting"
+msgstr ""
+
+#. Tag: para
+#: query.xml:181
+#, no-c-format
+msgid ""
+"Apache Lucene provides a very flexible and powerful way to sort results. "
+"While the default sorting (by relevance) is appropriate most of the time, it "
+"can be interesting to sort by one or several other properties. In order to "
+"do so set the Lucene Sort object to apply a Lucene sorting strategy."
+msgstr ""
+
+#. Tag: title
+#: query.xml:188
+#, no-c-format
+msgid ""
+"Specifying a Lucene <classname>Sort</classname> in order to sort the results"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:191
+#, no-c-format
+msgid ""
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery( query, "
+"Book.class );\n"
+"org.apache.lucene.search.Sort sort = new Sort(new SortField(\"title\"));\n"
+"<emphasis role=\"bold\">query.setSort(sort);</emphasis>\n"
+"List results = query.list();"
+msgstr ""
+
+#. Tag: para
+#: query.xml:194
+#, no-c-format
+msgid ""
+"One can notice the <classname>FullTextQuery</classname> interface which is a "
+"sub interface of <classname>org.hibernate.Query</classname>. Be aware that "
+"fields used for sorting must not be tokenized."
+msgstr ""
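One common way to satisfy this requirement, shown here only as a sketch (it assumes the @Fields annotation is available in this version; the "title_sort" field name is illustrative), is to index the property a second time, untokenized, solely for sorting:

    @Fields( {
        @Field(index = Index.TOKENIZED),                         // searched field "title"
        @Field(name = "title_sort", index = Index.UN_TOKENIZED)  // untokenized, used for sorting
    } )
    private String title;

    // then sort on the untokenized field
    org.apache.lucene.search.Sort sort =
            new org.apache.lucene.search.Sort( new org.apache.lucene.search.SortField( "title_sort" ) );
    query.setSort( sort );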
+
+#. Tag: title
+#: query.xml:201
+#, no-c-format
+msgid "Fetching strategy"
+msgstr ""
+
+#. Tag: para
+#: query.xml:203
+#, no-c-format
+msgid ""
+"When you restrict the return types to one class, Hibernate Search loads the "
+"objects using a single query. It also respects the static fetching strategy "
+"defined in your domain model."
+msgstr ""
+
+#. Tag: para
+#: query.xml:207
+#, no-c-format
+msgid ""
+"It is often useful, however, to refine the fetching strategy for a specific "
+"use case."
+msgstr ""
+
+#. Tag: title
+#: query.xml:211
+#, no-c-format
+msgid "Specifying <classname>FetchMode</classname> on a query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:214
+#, no-c-format
+msgid ""
+"Criteria criteria = s.createCriteria( Book.class ).setFetchMode( \"authors"
+"\", FetchMode.JOIN );\n"
+"s.createFullTextQuery( luceneQuery ).setCriteriaQuery( criteria );"
+msgstr ""
+
+#. Tag: para
+#: query.xml:217
+#, no-c-format
+msgid ""
+"In this example, the query will return all Books matching the luceneQuery. "
+"The authors collection will be loaded from the same query using an SQL outer "
+"join."
+msgstr ""
+
+#. Tag: para
+#: query.xml:221
+#, no-c-format
+msgid ""
+"When defining a criteria query, it is not needed to restrict the entity "
+"types returned while creating the Hibernate Search query from the full text "
+"session: the type is guessed from the criteria query itself. Only fetch mode "
+"can be adjusted, refrain from applying any other restriction."
+msgstr ""
+
+#. Tag: para
+#: query.xml:227
+#, no-c-format
+msgid ""
+"One cannot use <methodname>setCriteriaQuery</methodname> if more than one "
+"entity type is expected to be returned."
+msgstr ""
+
+#. Tag: title
+#: query.xml:232
+#, no-c-format
+msgid "Projection"
+msgstr ""
+
+#. Tag: para
+#: query.xml:234
+#, no-c-format
+msgid ""
+"For some use cases, returning the domain object (graph) is overkill. Only a "
+"small subset of the properties is necessary. Hibernate Search allows you to "
+"return a subset of properties:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:239
+#, no-c-format
+msgid "Using projection instead of returning the full domain object"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:242
+#, no-c-format
+msgid ""
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery"
+"( luceneQuery, Book.class );\n"
+"query.<emphasis role=\"bold\">setProjection( \"id\", \"summary\", \"body\", "
+"\"mainAuthor.name\" )</emphasis>;\n"
+"List results = query.list();\n"
+"Object[] firstResult = (Object[]) results.get(0);\n"
+"Integer id = firstResult[0];\n"
+"String summary = firstResult[1];\n"
+"String body = firstResult[2];\n"
+"String authorName = firstResult[3];"
+msgstr ""
+
+#. Tag: para
+#: query.xml:245
+#, no-c-format
+msgid ""
+"Hibernate Search extracts the properties from the Lucene index and convert "
+"them back to their object representation, returning a list of "
+"<classname>Object[]</classname>. Projections avoid a potential database "
+"round trip (useful if the query response time is critical), but has some "
+"constraints:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:253
+#, no-c-format
+msgid ""
+"the properties projected must be stored in the index (<literal>@Field"
+"(store=Store.YES)</literal>), which increase the index size"
+msgstr ""
+
+#. Tag: para
+#: query.xml:259
+#, no-c-format
+msgid ""
+"the properties projected must use a <literal>FieldBridge</literal> "
+"implementing <classname>org.hibernate.search.bridge.TwoWayFieldBridge</"
+"classname> or <literal>org.hibernate.search.bridge.TwoWayStringBridge</"
+"literal>, the latter being the simpler version. All Hibernate Search built-"
+"in types are two-way."
+msgstr ""
+
+#. Tag: para
+#: query.xml:269
+#, no-c-format
+msgid ""
+"you can only project simple properties of the indexed entity or its embedded "
+"associations. This means you cannot project a whole embedded entity."
+msgstr ""
+
+#. Tag: para
+#: query.xml:275
+#, no-c-format
+msgid ""
+"projection does not work on collections or maps which are indexed via "
+"<classname>@IndexedEmbedded</classname>"
+msgstr ""
+
+#. Tag: para
+#: query.xml:280
+#, no-c-format
+msgid ""
+"Projection is useful for another kind of use cases. Lucene provides some "
+"metadata information to the user about the results. By using some special "
+"placeholders, the projection mechanism can retrieve them:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:286
+#, no-c-format
+msgid "Using projection in order to retrieve meta data"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:288
+#, no-c-format
+msgid ""
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery"
+"( luceneQuery, Book.class );\n"
+"query.<emphasis role=\"bold\">setProjection( FullTextQuery.SCORE, "
+"FullTextQuery.THIS, \"mainAuthor.name\" )</emphasis>;\n"
+"List results = query.list();\n"
+"Object[] firstResult = (Object[]) results.get(0);\n"
+"float score = firstResult[0];\n"
+"Book book = firstResult[1];\n"
+"String authorName = firstResult[2];"
+msgstr ""
+
+#. Tag: para
+#: query.xml:291
+#, no-c-format
+msgid ""
+"You can mix and match regular fields and special placeholders. Here is the "
+"list of available placeholders:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:296
+#, no-c-format
+msgid ""
+"FullTextQuery.THIS: returns the initialized and managed entity (as a non "
+"projected query would have done)."
+msgstr ""
+
+#. Tag: para
+#: query.xml:301
+#, no-c-format
+msgid ""
+"FullTextQuery.DOCUMENT: returns the Lucene Document related to the object "
+"projected."
+msgstr ""
+
+#. Tag: para
+#: query.xml:306
+#, no-c-format
+msgid "FullTextQuery.OBJECT_CLASS: returns the class of the indexed entity."
+msgstr ""
+
+#. Tag: para
+#: query.xml:311
+#, no-c-format
+msgid ""
+"FullTextQuery.SCORE: returns the document score in the query. Scores are "
+"handy to compare one result against an other for a given query but are "
+"useless when comparing the result of different queries."
+msgstr ""
+
+#. Tag: para
+#: query.xml:318
+#, no-c-format
+msgid "FullTextQuery.ID: the id property value of the projected object."
+msgstr ""
+
+#. Tag: para
+#: query.xml:323
+#, no-c-format
+msgid ""
+"FullTextQuery.DOCUMENT_ID: the Lucene document id. Careful, Lucene document "
+"id can change overtime between two different IndexReader opening (this "
+"feature is experimental)."
+msgstr ""
+
+#. Tag: para
+#: query.xml:329
+#, no-c-format
+msgid ""
+"FullTextQuery.EXPLANATION: returns the Lucene Explanation object for the "
+"matching object/document in the given query. Do not use if you retrieve a "
+"lot of data. Running explanation typically is as costly as running the whole "
+"Lucene query per matching element. Make sure you use projection!"
+msgstr ""
+
+#. Tag: title
+#: query.xml:341
+#, no-c-format
+msgid "Retrieving the results"
+msgstr ""
+
+#. Tag: para
+#: query.xml:343
+#, no-c-format
+msgid ""
+"Once the Hibernate Search query is built, executing it is in no way "
+"different than executing a HQL or Criteria query. The same paradigm and "
+"object semantic applies. All the common operations are available: "
+"<methodname>list()</methodname>, <methodname>uniqueResult()</methodname>, "
+"<methodname>iterate()</methodname>, <methodname>scroll()</methodname>."
+msgstr ""
+
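As a rough sketch (the Book entity and the luceneQuery variable are assumed, as in the earlier examples), running the query with these common operations could look like this:

org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
List results = query.list();                                  // load all matching entities (within pagination)
Book first = (Book) query.setMaxResults( 1 ).uniqueResult();  // convenient when a single match is expected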
+#. Tag: title
+#: query.xml:351
+#, no-c-format
+msgid "Performance considerations"
+msgstr ""
+
+#. Tag: para
+#: query.xml:353
+#, no-c-format
+msgid ""
+"If you expect a reasonable number of results (for example using pagination) "
+"and expect to work on all of them, <methodname>list()</methodname> or "
+"<methodname>uniqueResult()</methodname> are recommended. <methodname>list()</"
+"methodname> work best if the entity <literal>batch-size</literal> is set up "
+"properly. Note that Hibernate Search has to process all Lucene Hits elements "
+"(within the pagination) when using <methodname>list()</methodname> , "
+"<methodname>uniqueResult()</methodname> and <methodname>iterate()</"
+"methodname>."
+msgstr ""
+
+#. Tag: para
+#: query.xml:364
+#, no-c-format
+msgid ""
+"If you wish to minimize Lucene document loading, <methodname>scroll()</"
+"methodname> is more appropriate. Don't forget to close the "
+"<classname>ScrollableResults</classname> object when you're done, since it "
+"keeps Lucene resources. If you expect to use <methodname>scroll,</"
+"methodname> but wish to load objects in batch, you can use <methodname>query."
+"setFetchSize()</methodname>. When an object is accessed, and if not already "
+"loaded, Hibernate Search will load the next <literal>fetchSize</literal> "
+"objects in one pass."
+msgstr ""
+
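A minimal sketch of batched scrolling, assuming the same Book entity and luceneQuery used in the previous examples:

org.hibernate.search.FullTextQuery query = s.createFullTextQuery( luceneQuery, Book.class );
query.setFetchSize( 20 );                  // Hibernate Search loads the next 20 objects in one pass
ScrollableResults scroll = query.scroll();
try {
    while ( scroll.next() ) {
        Book book = (Book) scroll.get( 0 );
        // process the book
    }
}
finally {
    scroll.close();                        // releases the underlying Lucene resources
}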
+#. Tag: para
+#: query.xml:373
+#, no-c-format
+msgid "Pagination is a preferred method over scrolling though."
+msgstr ""
+
+#. Tag: title
+#: query.xml:377
+#, no-c-format
+msgid "Result size"
+msgstr ""
+
+#. Tag: para
+#: query.xml:379
+#, no-c-format
+msgid "It is sometime useful to know the total number of matching documents:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:384
+#, no-c-format
+msgid "for the Google-like feature 1-10 of about 888,000,000"
+msgstr ""
+
+#. Tag: para
+#: query.xml:388
+#, no-c-format
+msgid "to implement a fast pagination navigation"
+msgstr ""
+
+#. Tag: para
+#: query.xml:392
+#, no-c-format
+msgid ""
+"to implement a multi step search engine (adding approximation if the "
+"restricted query return no or not enough results)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:397
+#, no-c-format
+msgid ""
+"Of course it would be too costly to retrieve all the matching documents. "
+"Hibernate Search allows you to retrieve the total number of matching "
+"documents regardless of the pagination parameters. Even more interesting, "
+"you can retrieve the number of matching elements without triggering a single "
+"object load."
+msgstr ""
+
+#. Tag: title
+#: query.xml:404
+#, no-c-format
+msgid "Determining the result size of a query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:406
+#, no-c-format
+msgid ""
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery"
+"( luceneQuery, Book.class );\n"
+"assert 3245 == <emphasis role=\"bold\">query.getResultSize()</emphasis>; //"
+"return the number of matching books without loading a single one\n"
+"\n"
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery"
+"( luceneQuery, Book.class );\n"
+"query.setMaxResult(10);\n"
+"List results = query.list();\n"
+"assert 3245 == <emphasis role=\"bold\">query.getResultSize()</emphasis>; //"
+"return the total number of matching books regardless of pagination"
+msgstr ""
+
+#. Tag: para
+#: query.xml:410
+#, no-c-format
+msgid ""
+"Like Google, the number of results is approximative if the index is not "
+"fully up-to-date with the database (asynchronous cluster for example)."
+msgstr ""
+
+#. Tag: title
+#: query.xml:417
+#, no-c-format
+msgid "ResultTransformer"
+msgstr ""
+
+#. Tag: para
+#: query.xml:419
+#, no-c-format
+msgid ""
+"Especially when using projection, the data structure returned by a query (an "
+"object array in this case), is not always matching the application needs. It "
+"is possible to apply a <classname>ResultTransformer</classname> operation "
+"post query to match the targeted data structure:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:426
+#, no-c-format
+msgid "Using ResultTransformer in conjunction with projections"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:428
+#, no-c-format
+msgid ""
+"org.hibernate.search.FullTextQuery query = s.createFullTextQuery"
+"( luceneQuery, Book.class );\n"
+"query.setProjection( \"title\", \"mainAuthor.name\" );\n"
+"\n"
+"<emphasis role=\"bold\">query.setResultTransformer( \n"
+" new StaticAliasToBeanResultTransformer( BookView.class, \"title\", "
+"\"author\" ) \n"
+");</emphasis>\n"
+"List<BookView> results = (List<BookView>) query.list();\n"
+"for(BookView view : results) {\n"
+" log.info( \"Book: \" + view.getTitle() + \", \" + view.getAuthor() );\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:431
+#, no-c-format
+msgid ""
+"Examples of <classname>ResultTransformer</classname> implementations can be "
+"found in the Hibernate Core codebase."
+msgstr ""
+
+#. Tag: title
+#: query.xml:436
+#, no-c-format
+msgid "Understanding results"
+msgstr ""
+
+#. Tag: para
+#: query.xml:438
+#, no-c-format
+msgid ""
+"You will find yourself sometimes puzzled by a result showing up in a query "
+"or a result not showing up in a query. Luke is a great tool to understand "
+"those mysteries. However, Hibernate Search also gives you access to the "
+"Lucene <classname>Explanation</classname> object for a given result (in a "
+"given query). This class is considered fairly advanced to Lucene users but "
+"can provide a good understanding of the scoring of an object. You have two "
+"ways to access the Explanation object for a given result:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:449
+#, no-c-format
+msgid "Use the <methodname>fullTextQuery.explain(int)</methodname> method"
+msgstr ""
+
+#. Tag: para
+#: query.xml:454
+#, no-c-format
+msgid "Use projection"
+msgstr ""
+
+#. Tag: para
+#: query.xml:458
+#, no-c-format
+msgid ""
+"The first approach takes a document id as a parameter and return the "
+"Explanation object. The document id can be retrieved using projection and "
+"the <literal>FullTextQuery.DOCUMENT_ID</literal> constant."
+msgstr ""
+
+#. Tag: para
+#: query.xml:464
+#, no-c-format
+msgid ""
+"The Document id has nothing to do with the entity id. Do not mess up these "
+"two notions."
+msgstr ""
+
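A sketch of the first approach (the Dvd entity, the luceneQuery variable and the display() helper are assumptions borrowed from the surrounding examples); the document id is projected and then passed to explain():

FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )
        .setProjection( FullTextQuery.DOCUMENT_ID );
List<Object[]> results = ftQuery.list();
for ( Object[] row : results ) {
    int docId = (Integer) row[0];                 // Lucene document id, NOT the entity id
    Explanation explanation = ftQuery.explain( docId );
    display( explanation.toString() );
}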
+#. Tag: para
+#: query.xml:468
+#, no-c-format
+msgid ""
+"The second approach let's you project the <classname>Explanation</classname> "
+"object using the <literal>FullTextQuery.EXPLANATION</literal> constant."
+msgstr ""
+
+#. Tag: title
+#: query.xml:473
+#, no-c-format
+msgid "Retrieving the Lucene Explanation object using projection"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:476
+#, no-c-format
+msgid ""
+"FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )\n"
+" .setProjection( FullTextQuery.DOCUMENT_ID, <emphasis role=\"bold"
+"\">FullTextQuery.EXPLANATION</emphasis>, FullTextQuery.THIS );\n"
+"@SuppressWarnings(\"unchecked\") List<Object[]> results = ftQuery.list"
+"();\n"
+"for (Object[] result : results) {\n"
+" Explanation e = (Explanation) result[1];\n"
+" display( e.toString() );\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:479
+#, no-c-format
+msgid ""
+"Be careful, building the explanation object is quite expensive, it is "
+"roughly as expensive as running the Lucene query again. Don't do it if you "
+"don't need the object"
+msgstr ""
+
+#. Tag: title
+#: query.xml:486
+#, no-c-format
+msgid "Filters"
+msgstr ""
+
+#. Tag: para
+#: query.xml:488
+#, no-c-format
+msgid ""
+"Apache Lucene has a powerful feature that allows to filter query results "
+"according to a custom filtering process. This is a very powerful way to "
+"apply additional data restrictions, especially since filters can be cached "
+"and reused. Some interesting use cases are:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:495
+#, no-c-format
+msgid "security"
+msgstr ""
+
+#. Tag: para
+#: query.xml:499
+#, no-c-format
+msgid "temporal data (eg. view only last month's data)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:503
+#, no-c-format
+msgid "population filter (eg. search limited to a given category)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:508
+#, no-c-format
+msgid "and many more"
+msgstr ""
+
+#. Tag: para
+#: query.xml:512
+#, no-c-format
+msgid ""
+"Hibernate Search pushes the concept further by introducing the notion of "
+"parameterizable named filters which are transparently cached. For people "
+"familiar with the notion of Hibernate Core filters, the API is very similar:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:518
+#, no-c-format
+msgid "Enabling fulltext filters for a given query"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:520
+#, no-c-format
+msgid ""
+"fullTextQuery = s.createFullTextQuery( query, Driver.class );\n"
+"fullTextQuery.enableFullTextFilter(\"bestDriver\");\n"
+"fullTextQuery.enableFullTextFilter(\"security\").setParameter( \"login\", "
+"\"andre\" );\n"
+"fullTextQuery.list(); //returns only best drivers where andre has credentials"
+msgstr ""
+
+#. Tag: para
+#: query.xml:523
+#, no-c-format
+msgid ""
+"In this example we enabled two filters on top of the query. You can enable "
+"(or disable) as many filters as you like."
+msgstr ""
+
+#. Tag: para
+#: query.xml:526
+#, no-c-format
+msgid ""
+"Declaring filters is done through the <classname>@FullTextFilterDef</"
+"classname> annotation. This annotation can be on any <literal>@Indexed</"
+"literal> entity regardless of the query the filter is later applied to. This "
+"implies that filter definitions are global and their names must be unique. A "
+"<classname>SearchException</classname> is thrown in case two different "
+"<classname>@FullTextFilterDef</classname> annotations with the same name are "
+"defined. Each named filter has to specify its actual filter implementation."
+msgstr ""
+
+#. Tag: title
+#: query.xml:537
+#, no-c-format
+msgid "Defining and implementing a Filter"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:539
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"@FullTextFilterDefs( {\n"
+" <emphasis role=\"bold\">@FullTextFilterDef(name = \"bestDriver\", impl = "
+"BestDriversFilter.class)</emphasis>, \n"
+" <emphasis role=\"bold\">@FullTextFilterDef(name = \"security\", impl = "
+"SecurityFilterFactory.class)</emphasis> \n"
+"})\n"
+"public class Driver { ... }"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:541
+#, no-c-format
+msgid ""
+"public class BestDriversFilter extends <emphasis\n"
+" role=\"bold\">org.apache.lucene.search.Filter</emphasis> {\n"
+"\n"
+" public DocIdSet getDocIdSet(IndexReader reader) throws IOException {\n"
+" OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );\n"
+" TermDocs termDocs = reader.termDocs( new Term( \"score\", \"5"
+"\" ) );\n"
+" while ( termDocs.next() ) {\n"
+" bitSet.set( termDocs.doc() );\n"
+" }\n"
+" return bitSet;\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:544
+#, no-c-format
+msgid ""
+"<classname>BestDriversFilter</classname> is an example of a simple Lucene "
+"filter which reduces the result set to drivers whose score is 5. In this "
+"example the specified filter implements the <literal>org.apache.lucene."
+"search.Filter</literal> directly and contains a no-arg constructor."
+msgstr ""
+
+#. Tag: para
+#: query.xml:550
+#, no-c-format
+msgid ""
+"If your Filter creation requires additional steps or if the filter you want "
+"to use does not have a no-arg constructor, you can use the factory pattern:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:555
+#, no-c-format
+msgid "Creating a filter using the factory pattern"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:557
+#, no-c-format
+msgid ""
+"@Entity\n"
+"@Indexed\n"
+"@FullTextFilterDef(name = \"bestDriver\", impl = BestDriversFilterFactory."
+"class)\n"
+"public class Driver { ... }\n"
+"\n"
+"public class BestDriversFilterFactory {\n"
+"\n"
+" <emphasis role=\"bold\">@Factory</emphasis>\n"
+" public Filter getFilter() {\n"
+" //some additional steps to cache the filter results per IndexReader\n"
+" Filter bestDriversFilter = new BestDriversFilter();\n"
+" return new CachingWrapperFilter(bestDriversFilter);\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:560
+#, no-c-format
+msgid ""
+"Hibernate Search will look for a <literal>@Factory</literal> annotated "
+"method and use it to build the filter instance. The factory must have a no-"
+"arg constructor. For people familiar with JBoss Seam, this is similar to the "
+"component factory pattern, but the annotation is different!"
+msgstr ""
+
+#. Tag: para
+#: query.xml:566
+#, no-c-format
+msgid ""
+"Named filters come in handy where parameters have to be passed to the "
+"filter. For example a security filter might want to know which security "
+"level you want to apply:"
+msgstr ""
+
+#. Tag: title
+#: query.xml:571
+#, no-c-format
+msgid "Passing parameters to a defined filter"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:573
+#, no-c-format
+msgid ""
+"fullTextQuery = s.createFullTextQuery( query, Driver.class );\n"
+"fullTextQuery.enableFullTextFilter(\"security\")<emphasis role=\"bold\">."
+"setParameter( \"level\", 5 )</emphasis>;"
+msgstr ""
+
+#. Tag: para
+#: query.xml:576
+#, no-c-format
+msgid ""
+"Each parameter name should have an associated setter on either the filter or "
+"filter factory of the targeted named filter definition."
+msgstr ""
+
+#. Tag: title
+#: query.xml:580
+#, no-c-format
+msgid "Using parameters in the actual filter implementation"
+msgstr ""
+
+#. Tag: programlisting
+#: query.xml:582
+#, no-c-format
+msgid ""
+"public class SecurityFilterFactory {\n"
+" private Integer level;\n"
+"\n"
+" /**\n"
+" * injected parameter\n"
+" */\n"
+" <emphasis role=\"bold\">public void setLevel(Integer level)</emphasis> "
+"{\n"
+" this.level = level;\n"
+" }\n"
+"\n"
+" <emphasis role=\"bold\">@Key\n"
+" public FilterKey getKey()</emphasis> {\n"
+" StandardFilterKey key = new StandardFilterKey();\n"
+" key.addParameter( level );\n"
+" return key;\n"
+" }\n"
+"\n"
+" @Factory\n"
+" public Filter getFilter() {\n"
+" Query query = new TermQuery( new Term(\"level\", level.toString"
+"() ) );\n"
+" return new CachingWrapperFilter( new QueryWrapperFilter(query) );\n"
+" }\n"
+"}"
+msgstr ""
+
+#. Tag: para
+#: query.xml:585
+#, no-c-format
+msgid ""
+"Note the method annotated <classname>@Key</classname> returning a "
+"<classname>FilterKey</classname> object. The returned object has a special "
+"contract: the key object must implement <methodname>equals()</methodname> / "
+"<methodname>hashCode()</methodname> so that 2 keys are equal if and only if "
+"the given <classname>Filter</classname> types are the same and the set of "
+"parameters are the same. In other words, 2 filter keys are equal if and only "
+"if the filters from which the keys are generated can be interchanged. The "
+"key object is used as a key in the cache mechanism."
+msgstr ""
+
+#. Tag: para
+#: query.xml:594
+#, no-c-format
+msgid "<classname>@Key</classname> methods are needed only if:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:598
+#, no-c-format
+msgid "you enabled the filter caching system (enabled by default)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:603
+#, no-c-format
+msgid "your filter has parameters"
+msgstr ""
+
+#. Tag: para
+#: query.xml:607
+#, no-c-format
+msgid ""
+"In most cases, using the <literal>StandardFilterKey</literal> implementation "
+"will be good enough. It delegates the <methodname>equals()</methodname> / "
+"<methodname>hashCode()</methodname> implementation to each of the parameters "
+"equals and hashcode methods."
+msgstr ""
+
+#. Tag: para
+#: query.xml:613
+#, no-c-format
+msgid ""
+"As mentioned before the defined filters are per default cached and the cache "
+"uses a combination of hard and soft references to allow disposal of memory "
+"when needed. The hard reference cache keeps track of the most recently used "
+"filters and transforms the ones least used to <classname>SoftReferences</"
+"classname> when needed. Once the limit of the hard reference cache is "
+"reached additional filters are cached as <classname>SoftReferences</"
+"classname>. To adjust the size of the hard reference cache, use "
+"<literal>hibernate.search.filter.cache_strategy.size</literal> (defaults to "
+"128). For advanced use of filter caching, you can implement your own "
+"<classname>FilterCachingStrategy</classname>. The classname is defined by "
+"<literal>hibernate.search.filter.cache_strategy</literal>."
+msgstr ""
+
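As a sketch, the cache size can be adjusted like any other Hibernate property; the value 256 and the custom strategy class name below are arbitrary examples:

org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
cfg.setProperty( "hibernate.search.filter.cache_strategy.size", "256" );
// a custom strategy could be plugged in the same way (the class name is hypothetical):
// cfg.setProperty( "hibernate.search.filter.cache_strategy", "com.acme.MyFilterCachingStrategy" );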
+#. Tag: para
+#: query.xml:626
+#, no-c-format
+msgid ""
+"This filter caching mechanism should not be confused with caching the actual "
+"filter results. In Lucene it is common practice to wrap filters using the "
+"<classname>IndexReader</classname> around a <classname>CachingWrapperFilter."
+"</classname> The wrapper will cache the <classname>DocIdSet</classname> "
+"returned from the <methodname>getDocIdSet(IndexReader reader)</methodname> "
+"method to avoid expensive recomputation. It is important to mention that the "
+"computed <classname>DocIdSet</classname> is only cachable for the same "
+"<classname>IndexReader</classname> instance, because the reader effectively "
+"represents the state of the index at the moment it was opened. The document "
+"list cannot change within an opened <classname>IndexReader</classname>. A "
+"different/new<classname> IndexReader</classname> instance, however, works "
+"potentially on a different set of <classname>Document</classname>s (either "
+"from a different index or simply because the index has changed), hence the "
+"cached <classname>DocIdSet</classname> has to be recomputed."
+msgstr ""
+
+#. Tag: para
+#: query.xml:643
+#, no-c-format
+msgid ""
+"Hibernate Search also helps with this aspect of caching. Per default the "
+"<literal>cache</literal> flag of <classname>@FullTextFilterDef </"
+"classname>is set to <literal>FilterCacheModeType."
+"INSTANCE_AND_DOCIDSETRESULTS</literal> which will automatically cache the "
+"filter instance as well as wrap the specified filter around a Hibernate "
+"specific implementation of <classname>CachingWrapperFilter</classname> "
+"(<classname>org.hibernate.search.filter.CachingWrapperFilter</classname>). "
+"In contrast to Lucene's version of this class <classname>SoftReference</"
+"classname>s are used together with a hard reference count (see discussion "
+"about filter cache). The hard reference count can be adjusted using "
+"<literal>hibernate.search.filter.cache_docidresults.size</literal> (defaults "
+"to 5). The wrapping behaviour can be controlled using the "
+"<literal>@FullTextFilterDef.cache</literal> parameter. There are three "
+"different values for this parameter:"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:664
+#, no-c-format
+msgid "Value"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:666
+#, no-c-format
+msgid "Definition"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:672
+#, no-c-format
+msgid "FilterCacheModeType.NONE"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:674
+#, no-c-format
+msgid ""
+"No filter instance and no result is cached by Hibernate Search. For every "
+"filter call, a new filter instance is created. This setting might be useful "
+"for rapidly changing data sets or heavily memory constrained environments."
+msgstr ""
+
+#. Tag: entry
+#: query.xml:681
+#, no-c-format
+msgid "FilterCacheModeType.INSTANCE_ONLY"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:683
+#, no-c-format
+msgid ""
+"The filter instance is cached and reused across concurrent "
+"<methodname>Filter.getDocIdSet()</methodname> calls. <classname>DocIdSet</"
+"classname> results are not cached. This setting is useful when a filter uses "
+"its own specific caching mechanism or the filter results change dynamically "
+"due to application specific events making <classname>DocIdSet</classname> "
+"caching in both cases unnecessary."
+msgstr ""
+
+#. Tag: entry
+#: query.xml:695
+#, no-c-format
+msgid "FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS"
+msgstr ""
+
+#. Tag: entry
+#: query.xml:697
+#, no-c-format
+msgid ""
+"Both the filter instance and the <classname>DocIdSet</classname> results are "
+"cached. This is the default value."
+msgstr ""
+
+#. Tag: para
+#: query.xml:702
+#, no-c-format
+msgid ""
+"Last but not least - why should filters be cached? There are two areas where "
+"filter caching shines:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:708
+#, no-c-format
+msgid ""
+"the system does not update the targeted entity index often (in other words, "
+"the IndexReader is reused a lot)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:713
+#, no-c-format
+msgid ""
+"the Filter's DocIdSet is expensive to compute (compared to the time spent to "
+"execute the query)"
+msgstr ""
+
+#. Tag: title
+#: query.xml:720
+#, no-c-format
+msgid "Optimizing the query process"
+msgstr ""
+
+#. Tag: para
+#: query.xml:722
+#, no-c-format
+msgid "Query performance depends on several criteria:"
+msgstr ""
+
+#. Tag: para
+#: query.xml:726
+#, no-c-format
+msgid "the Lucene query itself: read the literature on this subject"
+msgstr ""
+
+#. Tag: para
+#: query.xml:731
+#, no-c-format
+msgid ""
+"the number of object loaded: use pagination (always ;-) ) or index "
+"projection (if needed)"
+msgstr ""
+
+#. Tag: para
+#: query.xml:736
+#, no-c-format
+msgid ""
+"the way Hibernate Search interacts with the Lucene readers: defines the "
+"appropriate <xref linkend=\"search-architecture-readerstrategy\"/>."
+msgstr ""
+
+#. Tag: title
+#: query.xml:744
+#, no-c-format
+msgid "Native Lucene Queries"
+msgstr ""
+
+#. Tag: para
+#: query.xml:746
+#, no-c-format
+msgid ""
+"If you wish to use some specific features of Lucene, you can always run "
+"Lucene specific queries. Check <xref linkend=\"search-lucene-native\"/> for "
+"more information."
+msgstr ""
Property changes on: search/trunk/hibernate-search/src/main/docbook/zh-CN/modules/query.po
___________________________________________________________________
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Environment.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/Environment.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/Environment.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,119 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public final class Environment {
+ /**
+ * Enables automatic registration of the event listeners in Hibernate Annotations and EntityManager. Defaults to true.
+ */
+ public static final String AUTOREGISTER_LISTENERS = "hibernate.search.autoregister_listeners";
+
+ /**
+ * Defines the indexing strategy, default <code>event</code>.
+ * The other option is <code>manual</code>.
+ */
+ public static final String INDEXING_STRATEGY = "hibernate.search.indexing_strategy";
+
+ /**
+ * Default Lucene analyser
+ */
+ public static final String ANALYZER_CLASS = "hibernate.search.analyzer";
+
+ /**
+ * Default Lucene similarity
+ */
+ public static final String SIMILARITY_CLASS = "hibernate.search.similarity";
+
+ public static final String WORKER_PREFIX = "hibernate.search.worker.";
+ public static final String WORKER_SCOPE = WORKER_PREFIX + "scope";
+ public static final String WORKER_BACKEND = WORKER_PREFIX + "backend";
+ public static final String WORKER_EXECUTION = WORKER_PREFIX + "execution";
+
+ /**
+ * Defines the maximum number of indexing operations batched per transaction.
+ */
+ public static final String WORKER_BATCHSIZE = WORKER_PREFIX + "batch_size";
+
+ /**
+ * Only used when execution is async.
+ * Thread pool size,
+ * default 1.
+ */
+ public static final String WORKER_THREADPOOL_SIZE = Environment.WORKER_PREFIX + "thread_pool.size";
+
+ /**
+ * Size of the buffer queue (besides the thread pool size)
+ * <ul>
+ * <li>only used when execution is async</li>
+ * <li>default infinite</li>
+ * </ul>
+ */
+ public static final String WORKER_WORKQUEUE_SIZE = Environment.WORKER_PREFIX + "buffer_queue.max";
+
+ /**
+ * Defines the reader prefix.
+ */
+ public static final String READER_PREFIX = "hibernate.search.reader.";
+
+ /**
+ * Defines the reader strategy used.
+ */
+ public static final String READER_STRATEGY = READER_PREFIX + "strategy";
+
+ /**
+ * filter caching strategy class (must have a no-arg constructor and implement FilterCachingStrategy)
+ */
+ public static final String FILTER_CACHING_STRATEGY = "hibernate.search.filter.cache_strategy";
+
+ /**
+ * number of docidresults cached in hard reference.
+ */
+ public static final String CACHE_DOCIDRESULTS_SIZE = "hibernate.search.filter.cache_docidresults.size";
+
+ /**
+ * batch backend implementation class (must have a no-arg constructor and implement BatchBackend)
+ * also prefix for configuration settings of the batch backend
+ */
+ public static final String BATCH_BACKEND = "hibernate.search.batchbackend";
+
+ /**
+ * When set to true a lock on the index will not be released until the
+ * SearchFactory (or SessionFactory) is closed.
+ * This improves performance in applying changes to the index, but no other application
+ * can access the index in write mode while Hibernate Search is running.
+ * This is an index-scoped property and defaults to false.
+ */
+ public static final String EXCLUSIVE_INDEX_USE = "exclusive_index_use";
+
+ /**
+ *
+ */
+ public static final String MODEL_MAPPING = "hibernate.search.model_mapping";
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Environment.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextFilter.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+/**
+ * Represents a FullTextFilter that is about to be applied.
+ * Used to inject parameters.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextFilter {
+ FullTextFilter setParameter(String name, Object value);
+ Object getParameter(String name);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextQuery.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextQuery.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextQuery.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,144 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.Explanation;
+
+import org.hibernate.Criteria;
+import org.hibernate.Query;
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * The base interface for Lucene powered searches.
+ *
+ * @author Hardy Ferentschik
+ * @author Emmanuel Bernard
+ */
+//TODO return FullTextQuery rather than Query in useful chain methods
+public interface FullTextQuery extends Query, ProjectionConstants {
+
+ /**
+ * Allows Lucene to sort the results. This is useful when you have
+ * additional sort requirements on top of the default Lucene ranking.
+ * Without Lucene sorting you would have to retrieve the full result set and
+ * order the Hibernate objects yourself.
+ *
+ * @param sort The lucene sort object.
+ *
+ * @return this for method chaining
+ */
+ FullTextQuery setSort(Sort sort);
+
+ /**
+ * Allows the use of Lucene filters.
+ * Semi-deprecated: the preferred way is to use the @FullTextFilterDef approach.
+ *
+ * @param filter The lucene filter.
+ *
+ * @return this for method chaining
+ */
+ FullTextQuery setFilter(Filter filter);
+
+ /**
+ * Returns the number of hits for this search
+ * <p/>
+ * Caution:
+ * The number of results might be slightly different from
+ * <code>list().size()</code> because the index might not be
+ * in sync with the database at the time of the query.
+ */
+ int getResultSize();
+
+ /**
+ * Defines the Database Query used to load the Lucene results.
+ * Useful to load a given object graph by refining the fetch modes.
+ * <p/>
+ * No projection ( criteria.setProjection() ) is allowed; the root entity must be the only returned type.
+ * No where restriction can be defined either.
+ */
+ FullTextQuery setCriteriaQuery(Criteria criteria);
+
+ /**
+ * Defines the Lucene field names projected and returned in a query result.
+ * Each field is converted back to its object representation, an Object[] being returned for each "row"
+ * (similar to an HQL or a Criteria API projection).
+ * <p/>
+ * A projectable field must be stored in the Lucene index and use a {@link org.hibernate.search.bridge.TwoWayFieldBridge}
+ * Unless notified in their JavaDoc, all built-in bridges are two-way. All @DocumentId fields are projectable by design.
+ * <p/>
+ * If the projected field is not a projectable field, null is returned in the object[]
+ */
+ FullTextQuery setProjection(String... fields);
+
+ /**
+ * Enable a given filter by its name. Returns a FullTextFilter object that allows filter parameter injection
+ */
+ FullTextFilter enableFullTextFilter(String name);
+
+ /**
+ * Disable a given filter by its name
+ */
+ void disableFullTextFilter(String name);
+
+ /**
+ * Return the Lucene {@link org.apache.lucene.search.Explanation}
+ * object describing the score computation for the matching object/document
+ * in the current query
+ *
+ * @param documentId Lucene Document id to be explained. This is NOT the object id.
+ *
+ * @return Lucene Explanation
+ */
+ Explanation explain(int documentId);
+
+ /**
+ * {@link Query#setFirstResult}
+ */
+ FullTextQuery setFirstResult(int firstResult);
+
+ /**
+ * {@link Query#setMaxResults}
+ */
+ FullTextQuery setMaxResults(int maxResults);
+
+ /**
+ * Defines scrollable result fetch size as well as the JDBC fetch size
+ */
+ FullTextQuery setFetchSize(int i);
+
+ /**
+ * Defines a result transformer used during projection; the aliases provided are the projection aliases.
+ */
+ FullTextQuery setResultTransformer(ResultTransformer transformer);
+
+ /**
+ * Returns the underlying type if possible, or throws IllegalArgumentException otherwise.
+ * Supported types are:
+ * - org.apache.lucene.search.Query: the underlying Lucene query
+ */
+ <T> T unwrap(Class<T> type);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextQuery.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextSession.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextSession.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextSession.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,103 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import java.io.Serializable;
+
+import org.hibernate.classic.Session;
+
+/**
+ * Extends the Hibernate {@link Session} with fulltext search and indexing capabilities.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextSession extends Session {
+
+ /**
+ * Create a fulltext query on top of a native Lucene query returning the matching objects
+ * of type <code>entities</code> and their respective subclasses.
+ *
+ * @param luceneQuery The native Lucene query to be run against the Lucene index.
+ * @param entities List of classes for type filtering. The query result will only return entities of
+ * the specified types and their respective subtypes. If no class is specified no type filtering will take place.
+ *
+ * @return A <code>FullTextQuery</code> wrapping around the native Lucene query.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class<?>... entities);
+
+ /**
+ * Force the (re)indexing of a given <b>managed</b> object.
+ * Indexing is batched per transaction: if a transaction is active, the operation
+ * will not affect the index at least until commit.
+ *
+ * @param entity The entity to index - must not be <code>null</code>.
+ *
+ * @throws IllegalArgumentException if entity is null or not an @Indexed entity
+ */
+ <T> void index(T entity);
+
+ /**
+ * @return the <code>SearchFactory</code> instance.
+ */
+ SearchFactory getSearchFactory();
+
+ /**
+ * Remove the entity with the type <code>entityType</code> and the identifier <code>id</code> from the index.
+ * If <code>id == null</code> all indexed entities of this type and its indexed subclasses are deleted. In this
+ * case this method behaves like {@link #purgeAll(Class)}.
+ *
+ * @param entityType The type of the entity to delete.
+ * @param id The id of the entity to delete.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purge(Class<T> entityType, Serializable id);
+
+ /**
+ * Remove all entities of a particular class and all its subclasses from the index.
+ *
+ * @param entityType The class of the entities to remove.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purgeAll(Class<T> entityType);
+
+ /**
+ * Flush all index changes, forcing Hibernate Search to apply all changes to the index without waiting for the batch limit.
+ */
+ public void flushToIndexes();
+
+ /**
+ * Creates a MassIndexer to rebuild the indexes of some
+ * or all indexed entity types.
+ * Instances cannot be reused.
+ * @param types optionally restrict the operation to selected types
+ * @return
+ */
+ public MassIndexer createIndexer(Class<?>... types);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/FullTextSession.java
___________________________________________________________________
Name: svn:keywords
+ Id
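A rough usage sketch for the interface above; the Book entity and the identifiers are assumptions used only for illustration:

FullTextSession fullTextSession = Search.getFullTextSession( session );
Transaction tx = fullTextSession.beginTransaction();
Book book = (Book) fullTextSession.get( Book.class, bookId );
fullTextSession.index( book );                      // (re)index a managed entity
fullTextSession.purge( Book.class, removedBookId ); // remove a single document from the index
tx.commit();                                        // index changes are applied on commit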
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/MassIndexer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/MassIndexer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/MassIndexer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,137 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import java.util.concurrent.Future;
+
+import org.hibernate.CacheMode;
+
+/**
+ * A MassIndexer is useful to rebuild the indexes from the
+ * data contained in the database.
+ * This process is expensive: all indexed entities and their
+ * indexedEmbedded properties are scrolled from the database.
+ *
+ * @author Sanne Grinovero
+ */
+public interface MassIndexer {
+
+ /**
+ * Set the number of threads to be used to load
+ * the root entities.
+ * @param numberOfThreads
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer threadsToLoadObjects(int numberOfThreads);
+
+ /**
+ * Sets the batch size used to load the root entities.
+ * @param batchSize
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer batchSizeToLoadObjects(int batchSize);
+
+ /**
+ * Sets the number of threads used to load the lazy collections
+ * related to the indexed entities.
+ * @param numberOfThreads
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer threadsForSubsequentFetching(int numberOfThreads);
+
+ /**
+ * Sets the number of threads to be used to analyze the documents
+ * and write to the index.
+ * @param numberOfThreads
+ * @return
+ */
+ //TODO implement? performance improvement was found to be
+ //interesting in unusual setups only.
+ //MassIndexer threadsForIndexWriter(int numberOfThreads);
+
+ /**
+ * Sets the cache interaction mode for the data loading tasks.
+ * Defaults to <tt>CacheMode.IGNORE</tt>.
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer cacheMode(CacheMode cacheMode);
+
+ /**
+ * If index optimization has to be started at the end
+ * of the indexing process.
+ * Defaults to <tt>true</tt>.
+ * @param optimize
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer optimizeOnFinish(boolean optimize);
+
+ /**
+ * If index optimization should be run before starting,
+ * after the purgeAll. Has no effect if <tt>purgeAll</tt> is set to false.
+ * Defaults to <tt>true</tt>.
+ * @param optimize
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer optimizeAfterPurge(boolean optimize);
+
+ /**
+ * If all entities should be removed from the index before starting
+ * using purgeAll. Set it to false only if you know there are no
+ * entities in the index: otherwise search results may be duplicated.
+ * Defaults to true.
+ * @param purgeAll
+ * @return <tt>this</tt> for method chaining
+ */
+ MassIndexer purgeAllOnStart(boolean purgeAll);
+
+ /**
+ * EXPERIMENTAL method: will probably change
+ *
+ * Will stop indexing after having indexed a set amount of objects.
+ * As a result the index will not be consistent
+ * with the database: use only for testing on an (undefined) subset of database data.
+ * @param maximum
+ * @return
+ */
+ MassIndexer limitIndexedObjectsTo(int maximum);
+
+ /**
+ * Starts the indexing process in background (asynchronous).
+ * Can be called only once.
+ * @return a Future to control task canceling.
+ * get() will block until completion.
+ * cancel() is currently not implemented.
+ */
+ Future<?> start();
+
+ /**
+ * Starts the indexing process, and then blocks until it is finished.
+ * Can be called only once.
+ * @throws InterruptedException if the current thread is interrupted
+ * while waiting.
+ */
+ void startAndWait() throws InterruptedException;
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/MassIndexer.java
___________________________________________________________________
Name: svn:keywords
+ Id
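A rough usage sketch for the MassIndexer above; the entity type and the tuning values are illustrative only:

void rebuildIndex(FullTextSession fullTextSession) throws InterruptedException {
    fullTextSession.createIndexer( Book.class )
            .batchSizeToLoadObjects( 25 )
            .threadsToLoadObjects( 4 )
            .threadsForSubsequentFetching( 2 )
            .cacheMode( CacheMode.IGNORE )
            .startAndWait();                        // blocks until mass indexing has finished
}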
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/ProjectionConstants.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/ProjectionConstants.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/ProjectionConstants.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+/**
+ * Defined projection constants.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ProjectionConstants {
+ /**
+ * Represents the Hibernate entity returned in a search.
+ */
+ public String THIS = "__HSearch_This";
+
+ /**
+ * The Lucene document returned by a search.
+ */
+ public String DOCUMENT = "__HSearch_Document";
+
+ /**
+ * The legacy document's score from a search.
+ */
+ public String SCORE = "__HSearch_Score";
+
+ /**
+ * The boost value of the Lucene document.
+ *
+ * @deprecated always return 1
+ */
+ public String BOOST = "__HSearch_Boost";
+
+ /**
+ * Object id property
+ */
+ public String ID = "__HSearch_id";
+
+ /**
+ * Lucene Document id
+ * Experimental: If you use this feature, please speak up in the forum
+ * <p/>
+ * Expert: the Lucene document id can change over time between two different IndexReader openings.
+ */
+ public String DOCUMENT_ID = "__HSearch_DocumentId";
+
+ /**
+ * Lucene {@link org.apache.lucene.search.Explanation} object describing the score computation for
+ * the matching object/document.
+ * This feature is relatively expensive; do not use it unless you return a limited
+ * amount of objects (using pagination).
+ * To retrieve the explanation of a single result, consider retrieving {@link #DOCUMENT_ID}
+ * and using fullTextQuery.explain(int).
+ */
+ public String EXPLANATION = "__HSearch_Explanation";
+
+ /**
+ * Represents the Hibernate entity class returned in a search. In contrast to the other constants this constant
+ * represents an actual field value of the underlying Lucene document and hence can directly be used in queries.
+ */
+ public String OBJECT_CLASS = "_hibernate_class";
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/ProjectionConstants.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Search.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/Search.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/Search.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import org.hibernate.Session;
+import org.hibernate.search.impl.FullTextSessionImpl;
+
+/**
+ * Helper class to get a FullTextSession out of a regular session.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public final class Search {
+
+ private Search() {
+ }
+
+ public static FullTextSession getFullTextSession(Session session) {
+ if (session instanceof FullTextSessionImpl) {
+ return (FullTextSession) session;
+ }
+ else {
+ return new FullTextSessionImpl(session);
+ }
+ }
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link #getFullTextSession(Session)}
+ */
+ @Deprecated
+ public static FullTextSession createFullTextSession(Session session) {
+ return getFullTextSession(session);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Search.java
___________________________________________________________________
Name: svn:keywords
+ Id
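A minimal sketch of the helper in use; the Book entity and the "title" field are assumptions:

FullTextSession fullTextSession = Search.getFullTextSession( session );
org.apache.lucene.search.Query luceneQuery = new TermQuery( new Term( "title", "lucene" ) );
List result = fullTextSession.createFullTextQuery( luceneQuery, Book.class ).list();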
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchException.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchException.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchException.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,49 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+/**
+ * Root of all search specific exceptions
+ *
+ * @author Emmanuel Bernard
+ */
+public class SearchException extends RuntimeException {
+
+ public SearchException() {
+ super();
+ }
+
+ public SearchException(String message) {
+ super( message );
+ }
+
+ public SearchException(String message, Throwable cause) {
+ super( message, cause );
+ }
+
+ public SearchException(Throwable cause) {
+ super( cause );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchException.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Provide application wide operations as well as access to the underlying Lucene resources.
+ * @author Emmanuel Bernard
+ */
+public interface SearchFactory {
+ /**
+ * Provide the configured readerProvider strategy,
+ * hence access to a Lucene IndexReader
+ */
+ ReaderProvider getReaderProvider();
+
+ /**
+ * Provide access to the DirectoryProviders (hence the Lucene Directories)
+ * for a given entity.
+ * In most cases, the returned type will be a one element array.
+ * But if the given entity is configured to use sharded indexes, then multiple
+ * elements will be returned. In this case all of them should be considered.
+ */
+ DirectoryProvider[] getDirectoryProviders(Class<?> entity);
+
+ /**
+ * Optimize all indexes
+ */
+ void optimize();
+
+ /**
+ * Optimize the index holding <code>entityType</code>
+ */
+ void optimize(Class entityType);
+
+ /**
+ * Experimental API:
+ * retrieves an analyzer instance by its definition name.
+ *
+ * @throws SearchException if the definition name is unknown
+ */
+ Analyzer getAnalyzer(String name);
+
+ /**
+ * Retrieves the scoped analyzer for a given class.
+ *
+ * @param clazz The class for which to retrieve the analyzer.
+ * @return The scoped analyzer for the specified class.
+ * @throws IllegalArgumentException in case <code>clazz == null</code> or the specified
+ * class is not an indexed entity.
+ */
+ Analyzer getAnalyzer(Class<?> clazz);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/SearchFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
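The factory is usually obtained from a FullTextSession. A sketch under the same assumptions as the previous example ("customanalyzer" is a hypothetical @AnalyzerDef name, and getSearchFactory() is assumed from the FullTextSession API, not shown in this hunk):

    import org.apache.lucene.analysis.Analyzer;
    import org.hibernate.search.SearchFactory;

    SearchFactory searchFactory = fullTextSession.getSearchFactory();
    // Optimize the index of a single entity type...
    searchFactory.optimize( Book.class );
    // ...or all indexes at once.
    searchFactory.optimize();
    // Look up a named analyzer declared through @AnalyzerDef.
    Analyzer analyzer = searchFactory.getAnalyzer( "customanalyzer" );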
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Version.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/Version.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/Version.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search;
+
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class Version {
+
+ public static String getVersionString() {
+ return "[WORKING]";
+ }
+
+ static {
+ LoggerFactory.make().info( "Hibernate Search {}", getVersionString() );
+ }
+
+ public static void touch() {
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/Version.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/Discriminator.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/Discriminator.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/Discriminator.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.analyzer;
+
+/**
+ * Returns the name of the analyzer definition to use for the element evaluated.
+ *
+ * @author Hardy Ferentschik
+ */
+public interface Discriminator {
+
+ /**
+ * Allows to specify the analyzer to be used for the given field based on the specified entity state.
+ *
+ * @param value The value of the field the <code>@AnalyzerDiscriminator</code> annotation was placed on. <code>null</code>
+ * if the annotation was placed on class level.
+ * @param entity The entity to be indexed.
+ * @param field The document field.
+ * @return The name of a defined analyzer to be used for the specified <code>field</code> or <code>null</code> if the
+ * default analyzer for this field should be used.
+ * @see org.hibernate.search.annotations.AnalyzerDef
+ */
+ String getAnalyzerDefinitionName(Object value, Object entity, String field);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/analyzer/Discriminator.java
___________________________________________________________________
Name: svn:keywords
+ Id
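A sketch of a Discriminator implementation that picks an analyzer per language; the entity, property and analyzer names are illustrative:

    import org.hibernate.search.analyzer.Discriminator;

    public class LanguageDiscriminator implements Discriminator {
        // value is the content of the annotated field (a language code here),
        // entity is the object being indexed, field the document field name.
        public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
            if ( value == null ) {
                return null; // fall back to the default analyzer
            }
            return value.toString(); // e.g. "en" or "de", matching @AnalyzerDef names
        }
    }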
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Define an Analyzer for a given entity, method, field or Field
+ * The order of precedence is as follows:
+ * - @Field
+ * - field / method
+ * - entity
+ * - default
+ *
+ * Either describe an explicit implementation through the <code>impl</code> parameter
+ * or use an external @AnalyzerDef definition through the <code>def</code> parameter
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE, ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface Analyzer {
+ Class<?> impl() default void.class;
+ String definition() default "";
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
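Usage sketch; the "customanalyzer" definition name is hypothetical, StandardAnalyzer is the stock Lucene analyzer:

    // Reference a named definition declared elsewhere with @AnalyzerDef...
    @Field
    @Analyzer(definition = "customanalyzer")
    private String title;

    // ...or point to an explicit Analyzer implementation.
    @Field
    @Analyzer(impl = org.apache.lucene.analysis.standard.StandardAnalyzer.class)
    private String summary;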
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDef.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDef.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDef.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,64 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Reusable analyzer definition.
+ * An analyzer definition defines:
+ * <ul>
+ * <li>one tokenizer</li>
+ * <li>optionally one or more filters</li>
+ * </ul>
+ * Filters are applied in the order they are defined.
+ * <p/>
+ * Reuses the Solr Tokenizer and Filter architecture.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD })
+@Documented
+public @interface AnalyzerDef {
+ /**
+ * @return Reference name to be used on {#org.hibernate.search.annotations.Analyzer}
+ */
+ String name();
+
+ /**
+ * @return Tokenizer used.
+ */
+ TokenizerDef tokenizer();
+
+ /**
+ * @return Filters used. The filters are applied in the defined order
+ */
+ TokenFilterDef[] filters() default { };
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDef.java
___________________________________________________________________
Name: svn:keywords
+ Id
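A sketch of a reusable definition. The @TokenizerDef/@TokenFilterDef factory and params attributes and the org.apache.solr.analysis factory classes are assumed from the Solr integration; they are not part of this hunk:

    @AnalyzerDef(name = "customanalyzer",
        tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
        filters = {
            @TokenFilterDef(factory = LowerCaseFilterFactory.class),
            @TokenFilterDef(factory = SnowballPorterFilterFactory.class,
                params = @Parameter(name = "language", value = "English"))
        })
    @Indexed
    public class Book { /* indexed properties */ }

Filters run in the declared order: lowercasing first, then stemming.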
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDefs.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDefs.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDefs.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Reusable analyzer definitions.
+ * This annotation allows multiple definition declarations per element
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE, ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface AnalyzerDefs {
+ AnalyzerDef[] value();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDefs.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDiscriminator.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDiscriminator.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDiscriminator.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+import org.hibernate.search.analyzer.Discriminator;
+
+/**
+ * Allows to dynamically select a named analyzer through a <code>Discriminator</code> implementation.
+ *
+ * @author Hardy Ferentschik
+ */
+(a)Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD })
+@Documented
+public @interface AnalyzerDiscriminator {
+ public Class<? extends Discriminator> impl();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/AnalyzerDiscriminator.java
___________________________________________________________________
Name: svn:keywords
+ Id
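Pairing the annotation with the Discriminator sketched earlier, and building on the same @AnalyzerDef assumptions (entity, property and definition names are illustrative):

    @Indexed
    @AnalyzerDefs({
        @AnalyzerDef(name = "en", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class)),
        @AnalyzerDef(name = "de", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class))
    })
    public class BlogEntry {
        // The language code selects one of the definitions above via LanguageDiscriminator.
        @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
        @Field
        private String language;

        @Field
        private String text;
    }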
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Boost.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Boost.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Boost.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,43 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Apply a boost factor on a field or a whole entity
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.TYPE, ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface Boost {
+ float value();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Boost.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/CalendarBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/CalendarBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/CalendarBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Defines the temporal resolution of a given field
+ * Calendars are stored as Strings in GMT.
+ *
+ * @author Amin Mohammed-Coleman
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface CalendarBridge {
+ Resolution resolution();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/CalendarBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
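Usage sketch; Resolution.DAY is assumed from the Resolution enum, which is not included in this hunk:

    @Field(index = Index.UN_TOKENIZED)
    @CalendarBridge(resolution = Resolution.DAY)
    private java.util.Calendar publicationDate;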
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * This annotation allows a user to apply an implementation
+ * class to a Lucene document to manipulate it in any way
+ * the user sees fit.
+ *
+ * @author John Griffin
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.TYPE )
+@Documented
+public @interface ClassBridge {
+ /**
+ * Field name, default to the JavaBean property name.
+ */
+ String name() default "";
+
+ /**
+ * Should the value be stored in the document.
+ * defaults to no.
+ */
+ Store store() default Store.NO;
+
+ /**
+ * Define an analyzer for the field, default to
+ * the inherited analyzer.
+ */
+ Analyzer analyzer() default @Analyzer;
+
+ /**
+ * Defines how the Field should be indexed
+ * defaults to tokenized.
+ */
+ Index index() default Index.TOKENIZED;
+
+ /**
+ * Define term vector storage requirements,
+ * default to NO.
+ */
+ TermVector termVector() default TermVector.NO;
+
+ /**
+ * A float value of the amount of Lucene defined
+ * boost to apply to a field.
+ */
+ Boost boost() default @Boost(value=1.0F);
+
+ /**
+ * User supplied class to manipulate document in
+ * whatever mysterious ways they wish to.
+ */
+ public Class<?> impl();
+
+ /**
+ * Array of parameters passed to the impl class
+ * defined above.
+ */
+ public Parameter[] params() default {};
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
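A sketch of a class-level bridge. CatFieldsClassBridge is a hypothetical org.hibernate.search.bridge.FieldBridge implementation that combines several properties into one index field; Store.YES and the @Parameter name/value attributes are assumed, as they are not part of this hunk:

    @Indexed
    @ClassBridge(name = "branchnetwork",
                 index = Index.TOKENIZED,
                 store = Store.YES,
                 impl = CatFieldsClassBridge.class,
                 params = @Parameter(name = "sepChar", value = " "))
    public class Department {
        private String network;
        private String branchHead;
        // ...
    }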
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridges.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridges.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridges.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @author John Griffin
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.TYPE )
+@Documented
+public @interface ClassBridges {
+ /**
+ * An array of ClassBridge annotations each of
+ * which is to be applied to the class containing
+ * this annotation.
+ */
+ ClassBridge[] value() default {};
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ClassBridges.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ContainedIn.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ContainedIn.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ContainedIn.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,64 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Describe the owning entity as being part of the target entity's
+ * index (to be more accurate, being part of the indexed object graph).
+ * <p>
+ * Only necessary when an @Indexed class is used as a @IndexedEmbedded
+ * target class. @ContainedIn must mark the property pointing back
+ * to the @IndexedEmbedded owning Entity.
+ * <p>
+ * Not necessary if the class is an @Embeddable class.
+ * <p>
+ * <code>
+ * @Indexed<br>
+ * public class OrderLine {<br>
+ * @IndexedEmbedded<br>
+ * private Order order;<br>
+ * }<br>
+ *<br>
+ * @Indexed<br>
+ * public class Order {<br>
+ * @ContainedIn<br>
+ * Set<OrderLine> lines;<br>
+ * }<br>
+ * </code><br>
+ * @see org.hibernate.search.annotations.Indexed
+ * @see org.hibernate.search.annotations.IndexedEmbedded
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface ContainedIn {
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ContainedIn.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DateBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DateBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DateBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.hibernate.search.annotations.Resolution;
+
+/**
+ * Defines the temporal resolution of a given field
+ * Dates are stored as Strings in GMT.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+//TODO allow pattern like yyyyMMdd?
+//TODO allow base timezone?
+public @interface DateBridge {
+ Resolution resolution();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DateBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
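Usage sketch, analogous to @CalendarBridge above (Resolution.MINUTE assumed from the same enum):

    @Field(index = Index.UN_TOKENIZED)
    @DateBridge(resolution = Resolution.MINUTE)
    private java.util.Date lastUpdated;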
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DocumentId.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DocumentId.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DocumentId.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declare a field as the document id. If set to a property, the property will be used
+ * TODO: If set to a class, the class itself will be passed to the FieldBridge
+ * Note that {@link org.hibernate.search.bridge.FieldBridge#get} must return the Entity id
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface DocumentId {
+ String name() default "";
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DocumentId.java
___________________________________________________________________
Name: svn:keywords
+ Id
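Typical placement next to the JPA identifier (sketch; the entity is illustrative):

    @Entity
    @Indexed
    public class Book {
        @Id
        @GeneratedValue
        @DocumentId
        private Long id;
        // ...
    }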
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DynamicBoost.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DynamicBoost.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DynamicBoost.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * Apply a dynamic boost factor on a field or a whole entity.
+ *
+ * @author Hardy Ferentschik
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD })
+@Documented
+public @interface DynamicBoost {
+
+ /**
+ * @return An implementation of <code>BoostStrategy</code> to apply a boost
+ * value as function of the annotated object.
+ *
+ * @see org.hibernate.search.engine.BoostStrategy
+ */
+ public abstract Class<? extends BoostStrategy> impl();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/DynamicBoost.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
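A sketch of a strategy and its use. The single defineBoost(Object) method is assumed to be the BoostStrategy contract; the interface itself is not part of this hunk, and Customer is a hypothetical entity:

    public class VIPBoostStrategy implements BoostStrategy {
        // Index VIP customers with twice the weight of regular ones.
        public float defineBoost(Object value) {
            Customer customer = (Customer) value;
            return customer.isVip() ? 2.0f : 1.0f;
        }
    }

    @Indexed
    @DynamicBoost(impl = VIPBoostStrategy.class)
    public class Customer { /* ... */ }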
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Factory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Factory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Factory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Marks a method as a factory method for a given type.
+ * A factory method is called whenever a new instance of a given
+ * type is requested.
+ * The factory method is used with a higher priority than a plain no-arg constructor when present
+ * <br />
+ * <code>@Factory</code> currently works for @FullTextFilterDef.impl classes
+ * @see org.hibernate.search.annotations.Factory
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.METHOD )
+@Documented
+public @interface Factory {
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Factory.java
___________________________________________________________________
Name: svn:keywords
+ Id
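A sketch of a filter factory referenced from @FullTextFilterDef further below; SecurityFilterFactory and the "level" index field are hypothetical, while QueryWrapperFilter, TermQuery and Term are standard Lucene classes:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.TermQuery;

    public class SecurityFilterFactory {
        private Integer level;

        public void setLevel(Integer level) {
            this.level = level;
        }

        @Factory
        public Filter getFilter() {
            // A new filter instance is built whenever one is requested.
            TermQuery query = new TermQuery( new Term( "level", level.toString() ) );
            return new QueryWrapperFilter( query );
        }
    }

Because this factory accepts a parameter, the filter definition sketched under @FullTextFilterDef below disables caching; with caching enabled a @Key method would also be required, as the FullTextFilterDef Javadoc notes.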
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Field.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Field.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Field.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,86 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+
+/**
+ * JavaDoc copied and pasted from the Apache Lucene project
+ * Available under the ASL 2.0 http://www.apache.org/licenses/LICENSE-2.0
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Mark a property as indexable
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.METHOD, ElementType.FIELD } )
+@Documented
+public @interface Field {
+ /**
+ * Field name, default to the JavaBean property name
+ */
+ String name() default "";
+
+ /**
+ * Should the value be stored in the document
+ * defaults to no.
+ */
+ Store store() default Store.NO;
+
+ /**
+ * Defines how the Field should be indexed
+ * defaults to tokenized
+ */
+ Index index() default Index.TOKENIZED;
+
+ /**
+ * Define term vector storage requirements,
+ * default to NO.
+ */
+ TermVector termVector() default TermVector.NO;
+
+ /**
+ * Define an analyzer for the field, default to
+ * the inherited analyzer
+ */
+ Analyzer analyzer() default @Analyzer;
+
+
+ /**
+ * Boost factor, default 1
+ */
+ Boost boost() default @Boost( value = 1.0F );
+
+ /**
+ * Field bridge used. Default is autowired.
+ */
+ FieldBridge bridge() default @FieldBridge;
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Field.java
___________________________________________________________________
Name: svn:keywords
+ Id
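Usage sketch combining the attributes above (property names are illustrative; Store.YES is assumed from the Store enum, not shown here):

    @Field(index = Index.TOKENIZED, store = Store.NO)
    private String summary;

    @Field(name = "publisher", index = Index.UN_TOKENIZED, store = Store.YES, boost = @Boost(1.5f))
    private String publisher;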
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FieldBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FieldBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FieldBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * specifies a given field bridge implementation
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( {ElementType.FIELD, ElementType.METHOD} )
+@Documented
+public @interface FieldBridge {
+ //default to embed @FieldBridge in @Field
+ public Class<?> impl() default void.class;
+
+ public Parameter[] params() default {};
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FieldBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Fields.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Fields.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Fields.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Mark a property as indexable into different fields
+ * Useful if the field is used for sorting and searching
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.METHOD, ElementType.FIELD} )
+@Documented
+public @interface Fields {
+ /**
+ * Fields
+ */
+ Field[] value();
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Fields.java
___________________________________________________________________
Name: svn:keywords
+ Id
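Indexing a single property under two field names, typically one tokenized for searching and one untokenized for sorting (sketch):

    @Fields({
        @Field(index = Index.TOKENIZED),
        @Field(name = "summary_forSort", index = Index.UN_TOKENIZED, store = Store.YES)
    })
    private String summary;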
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FilterCacheModeType.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FilterCacheModeType.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FilterCacheModeType.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+/**
+ * Cache mode strategy for <code>FullTextFilterDef</code>s.
+ *
+ * @see FullTextFilterDef
+ * @author Emmanuel Bernard
+ */
+public enum FilterCacheModeType {
+ /**
+ * No filter instance and no result is cached by Hibernate Search.
+ * For every filter call, a new filter instance is created.
+ */
+ NONE,
+
+ /**
+ * The filter instance is cached by Hibernate Search and reused across
+ * concurrent <code>Filter.getDocIdSet()</code> calls.
+ * Results are not cached by Hibernate Search.
+ *
+ * @see org.apache.lucene.search.Filter#bits(org.apache.lucene.index.IndexReader)
+
+ */
+ INSTANCE_ONLY,
+
+ /**
+ * Both the filter instance and the <code>DocIdSet</code> results are cached.
+ * The filter instance is cached by Hibernate Search and reused across
+ * concurrent <code>Filter.getDocIdSet()</code> calls.
+ * <code>DocIdSet</code> results are cached per <code>IndexReader</code>.
+ *
+ * @see org.apache.lucene.search.Filter#bits(org.apache.lucene.index.IndexReader)
+ */
+ INSTANCE_AND_DOCIDSETRESULTS
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FilterCacheModeType.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDef.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDef.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDef.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Defines a FullTextFilter that can be optionally applied to
+ * every FullText Queries
+ * While not related to a specific indexed entity, the annotation has to be set on one of them
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE } )
+@Documented
+public @interface FullTextFilterDef {
+ /**
+ * @return the filter name. Must be unique across all mappings for a given persistence unit
+ */
+ String name();
+
+ /**
+ * Either implements {@link org.apache.lucene.search.Filter}
+ * or contains a <code>@Factory</code> method returning one.
+ * The generated <code>Filter</code> must be thread-safe.
+ *
+ * If the filter accept parameters, an <code>@Key</code> method must be present as well.
+ *
+ * @return a class which either implements <code>Filter</code> directly or contains a method annotated with
+ * <code>@Factory</code>.
+ *
+ */
+ Class<?> impl();
+
+ /**
+ * @return The cache mode for the filter. Default to instance and results caching
+ */
+ FilterCacheModeType cache() default FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS;
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDef.java
___________________________________________________________________
Name: svn:keywords
+ Id
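Declaring and enabling a filter (sketch). Driver is a hypothetical entity and SecurityFilterFactory is the factory sketched under @Factory above; enableFullTextFilter and setParameter are assumed to be part of the FullTextQuery API, which is not included in this hunk:

    @Indexed
    @FullTextFilterDef(name = "security",
                       impl = SecurityFilterFactory.class,
                       cache = FilterCacheModeType.NONE)
    public class Driver { /* ... */ }

    // At query time:
    fullTextQuery.enableFullTextFilter( "security" ).setParameter( "level", 5 );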
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDefs.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDefs.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDefs.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,43 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * A list of FullTextFilterDef
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.TYPE } )
+@Documented
+public @interface FullTextFilterDefs {
+ FullTextFilterDef[] value();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/FullTextFilterDefs.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Index.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Index.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Index.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+/**
+ * Defines how a Field should be indexed
+ */
+public enum Index {
+ /**
+ * Do not index the field value. This field can thus not be searched,
+ * but one can still access its contents provided it is
+ * {@link Store stored}.
+ */
+ NO,
+ /**
+ * Index the field's value so it can be searched. An Analyzer will be used
+ * to tokenize and possibly further normalize the text before its
+ * terms will be stored in the index. This is useful for common text.
+ */
+ TOKENIZED,
+ /**
+ * Index the field's value without using an Analyzer, so it can be searched.
+ * As no analyzer is used the value will be stored as a single term. This is
+ * useful for unique Ids like product numbers.
+ */
+ UN_TOKENIZED,
+ /**
+ * Index the field's value without an Analyzer, and disable
+ * the storing of norms. No norms means that index-time boosting
+ * and field length normalization will be disabled. The benefit is
+ * less memory usage as norms take up one byte per indexed field
+ * for every document in the index.
+ */
+ NO_NORMS
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Index.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Indexed.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Indexed.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Indexed.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.TYPE )
+@Documented
+/**
+ * Specifies that an entity is to be indexed by Lucene
+ */
+public @interface Indexed {
+ /**
+ * @return The name of the index
+ */
+ String index() default "";
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Indexed.java
___________________________________________________________________
Name: svn:keywords
+ Id
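
For illustration only, not part of this revision: a minimal entity combining @Indexed with the Index and Store options above. The Essay class is hypothetical; the @Field and @DocumentId annotations are assumed from the same package.

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;

@Indexed( index = "indexes/essays" )
public class Essay {
    @DocumentId
    private Long id;

    // analyzed text, also stored so it can be projected from the index
    @Field( index = Index.TOKENIZED, store = Store.YES )
    private String summary;

    // single-term value, suitable for exact matches on identifiers
    @Field( index = Index.UN_TOKENIZED )
    private String isbn;
}
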
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/IndexedEmbedded.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/IndexedEmbedded.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/IndexedEmbedded.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,59 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( { ElementType.FIELD, ElementType.METHOD } )
+@Documented
+/**
+ * Specifies that an association (@*To*, @Embedded, @CollectionOfEmbedded) is to be indexed
+ * in the root entity index.
+ * It allows queries involving restrictions on associated objects.
+ */
+public @interface IndexedEmbedded {
+ /**
+ * Field name prefix
+ * Defaults to 'propertyname.'
+ */
+ String prefix() default ".";
+
+ /**
+ * Stop indexing embedded elements when depth is reached
+ * depth=1 means the associated element is indexed, but not its embedded elements
+ * Default: infinite (an exception will be raised if a circular class reference occurs while infinite is chosen)
+ */
+ int depth() default Integer.MAX_VALUE;
+
+ /**
+ * Overrides the type of an association. If a collection, overrides the type of the collection generics
+ */
+ Class<?> targetElement() default void.class;
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/IndexedEmbedded.java
___________________________________________________________________
Name: svn:keywords
+ Id
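
For illustration only, not part of this revision: a sketch of @IndexedEmbedded on an association. Place and Address are hypothetical; @Field and @DocumentId are assumed from the same package, and the association itself is supposed to be mapped with JPA elsewhere.

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;

@Indexed
public class Place {
    @DocumentId
    private Long id;

    @Field( index = Index.TOKENIZED )
    private String name;

    // Address fields end up in the Place index as address_city, ...
    @IndexedEmbedded( depth = 1, prefix = "address_" )
    private Address address;
}

class Address {
    @Field( index = Index.TOKENIZED )
    private String city;
}

A query on Place can then restrict on the embedded field, e.g. address_city:Atlanta.
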
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Key.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Key.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Key.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+/**
+ * Marks a method as a key constructor for a given type.
+ * A key is an object that uniquely identifies a given object type and a given set of parameters
+ *
+ * The key object must implement equals / hashCode so that 2 keys are equal iff
+ * the given target object types are the same and the sets of parameters are the same.
+ *
+ * @Factory currently works for @FullTextFilterDef.impl classes
+ * @see org.hibernate.search.annotations.Factory
+ * @author Emmanuel Bernard
+ */
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.METHOD )
+@Documented
+public @interface Key {
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Key.java
___________________________________________________________________
Name: svn:keywords
+ Id
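
For illustration only, not part of this revision: a sketch of a parameterized filter factory combining @Factory and @Key as described above. SecurityFilterFactory is hypothetical, and the FilterKey/StandardFilterKey support classes are assumed to live in org.hibernate.search.filter.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.CachingWrapperFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.annotations.Factory;
import org.hibernate.search.annotations.Key;
import org.hibernate.search.filter.FilterKey;
import org.hibernate.search.filter.StandardFilterKey;

public class SecurityFilterFactory {
    private String level;

    // filter parameter, injected when the filter is enabled on a query
    public void setLevel(String level) {
        this.level = level;
    }

    @Key
    public FilterKey getKey() {
        // two cache entries are equal only for the same target type and the same level
        StandardFilterKey key = new StandardFilterKey();
        key.addParameter( level );
        return key;
    }

    @Factory
    public Filter getFilter() {
        TermQuery query = new TermQuery( new Term( "level", level ) );
        return new CachingWrapperFilter( new QueryWrapperFilter( query ) );
    }
}
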
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Parameter.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Parameter.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Parameter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,42 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Target;
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+/**
+ * Parameter (basically key/value pattern)
+ *
+ * @author Emmanuel Bernard
+ */
+@Target({})
+@Retention(RUNTIME)
+public @interface Parameter {
+ String name();
+
+ String value();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Parameter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ProvidedId.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ProvidedId.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ProvidedId.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,50 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Objects whose identifier is provided externally and not part of the object state
+ * should be marked with this annotation
+ * <p/>
+ * This annotation should not be used in conjunction with {@link org.hibernate.search.annotations.DocumentId}
+ *
+ * @author Navin Surtani (<a href="mailto:nsurtani@redhat.com">nsurtani@redhat.com</a>)
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+@Documented
+public @interface ProvidedId {
+
+ String name() default "providedId";
+
+ FieldBridge bridge() default @FieldBridge(impl = org.hibernate.search.bridge.builtin.StringBridge.class);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/ProvidedId.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Resolution.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Resolution.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Resolution.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+/**
+ * Date indexing resolution.
+ *
+ * @author Emmanuel Bernard
+ */
+public enum Resolution {
+ YEAR,
+ MONTH,
+ DAY,
+ HOUR,
+ MINUTE,
+ SECOND,
+ MILLISECOND
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Resolution.java
___________________________________________________________________
Name: svn:keywords
+ Id
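
For illustration only, not part of this revision: how a Resolution is typically used. The Meeting class is hypothetical; the @DateBridge, @Field and @DocumentId annotations are assumed from the same package.

import java.util.Date;

import org.hibernate.search.annotations.DateBridge;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Resolution;

@Indexed
public class Meeting {
    @DocumentId
    private Long id;

    // indexed as a single term truncated to the day
    @Field( index = Index.UN_TOKENIZED )
    @DateBridge( resolution = Resolution.DAY )
    private Date scheduledOn;
}
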
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Similarity.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Similarity.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Similarity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,43 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention( RetentionPolicy.RUNTIME )
+@Target( ElementType.TYPE )
+@Documented
+/**
+ * Specifies a similarity implementation to use for a given class
+ *
+ * @author Nick Vincent
+ */
+public @interface Similarity {
+ public Class<?> impl();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Similarity.java
___________________________________________________________________
Name: svn:keywords
+ Id
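
For illustration only, not part of this revision: a sketch of a custom similarity attached with @Similarity. IgnoreLengthSimilarity and Snippet are hypothetical; Lucene's DefaultSimilarity and the @Field/@DocumentId annotations from this package are assumed.

import org.apache.lucene.search.DefaultSimilarity;

public class IgnoreLengthSimilarity extends DefaultSimilarity {
    @Override
    public float lengthNorm(String fieldName, int numTerms) {
        // ignore field length so short and long texts score alike
        return 1.0f;
    }
}

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Similarity;

@Indexed
@Similarity( impl = IgnoreLengthSimilarity.class )
public class Snippet {
    @DocumentId
    private Long id;

    @Field
    private String text;
}
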
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Store.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Store.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Store.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+/**
+ * Whether or not the value is stored in the document
+ *
+ * @author Emmanuel Bernard
+ */
+public enum Store {
+ /** does not store the value in the index */
+ NO,
+ /** stores the value in the index */
+ YES,
+ /** stores the value in the index in a compressed form */
+ COMPRESS
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/Store.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TermVector.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TermVector.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TermVector.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+/**
+ * Defines the term vector storing strategy
+ *
+ * @author John Griffin
+ */
+public enum TermVector {
+ /**
+ * Store term vectors.
+ */
+ YES,
+ /**
+ * Do not store term vectors.
+ */
+ NO,
+ /**
+ * Store the term vector + Token offset information
+ */
+ WITH_OFFSETS,
+ /**
+ * Store the term vector + token position information
+ */
+ WITH_POSITIONS,
+ /**
+ * Store the term vector + Token position and offset information
+ */
+ WITH_POSITION_OFFSETS
+}
+
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TermVector.java
___________________________________________________________________
Name: svn:keywords
+ Id
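
For illustration only, not part of this revision: term vectors are requested per field through the @Field annotation (assumed from the same package); the Article class is hypothetical.

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.TermVector;

@Indexed
public class Article {
    @DocumentId
    private Long id;

    // stores term frequency information alongside the inverted index,
    // useful for "more like this" or highlighting style features
    @Field( index = Index.TOKENIZED, termVector = TermVector.YES )
    private String body;
}
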
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenFilterDef.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenFilterDef.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenFilterDef.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.apache.solr.analysis.TokenFilterFactory;
+
+/**
+ * Define a <code>TokenFilterFactory</code> and its parameters.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD })
+@Documented
+public @interface TokenFilterDef {
+ /**
+ * @return the <code>TokenFilterFactory</code> class which shall be instantiated.
+ */
+ public abstract Class<? extends TokenFilterFactory> factory();
+
+ /**
+ * @return Optional parameters passed to the <code>TokenFilterFactory</code>.
+ */
+ public abstract Parameter[] params() default { };
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenFilterDef.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenizerDef.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenizerDef.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenizerDef.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Documented;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+/**
+ * Define a <code>TokenizerFactory</code> and its parameters.
+ *
+ * @author Emmanuel Bernard
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD })
+@Documented
+public @interface TokenizerDef {
+
+ /**
+ * @return the <code>TokenizerFactory</code> class which shall be instantiated.
+ */
+ Class<? extends TokenizerFactory> factory();
+
+ /**
+ * @return Optional parameters passed to the <code>TokenizerFactory</code>.
+ */
+ Parameter[] params() default { };
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/annotations/TokenizerDef.java
___________________________________________________________________
Name: svn:keywords
+ Id
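
For illustration only, not part of this revision: TokenizerDef and TokenFilterDef are normally composed inside an @AnalyzerDef (assumed from the same package), referencing Solr factory classes. The Book class, the analyzer name and the factory choices below are examples, not taken from this diff.

import org.apache.solr.analysis.LowerCaseFilterFactory;
import org.apache.solr.analysis.SnowballPorterFilterFactory;
import org.apache.solr.analysis.StandardTokenizerFactory;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.AnalyzerDef;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Parameter;
import org.hibernate.search.annotations.TokenFilterDef;
import org.hibernate.search.annotations.TokenizerDef;

@Indexed
@AnalyzerDef(
    name = "customanalyzer",
    tokenizer = @TokenizerDef( factory = StandardTokenizerFactory.class ),
    filters = {
        @TokenFilterDef( factory = LowerCaseFilterFactory.class ),
        @TokenFilterDef( factory = SnowballPorterFilterFactory.class,
                         params = @Parameter( name = "language", value = "English" ) )
    }
)
public class Book {
    @DocumentId
    private Long id;

    // analyzed with the definition declared above
    @Field
    @Analyzer( definition = "customanalyzer" )
    private String title;
}
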
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/AddLuceneWork.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/AddLuceneWork.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/AddLuceneWork.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,67 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class AddLuceneWork extends LuceneWork implements Serializable {
+
+ private static final long serialVersionUID = -2450349312813297371L;
+
+ private final Map<String, String> fieldToAnalyzerMap;
+
+ public AddLuceneWork(Serializable id, String idInString, Class entity, Document document) {
+ this( id, idInString, entity, document, false );
+ }
+
+ public AddLuceneWork(Serializable id, String idInString, Class entity, Document document, boolean batch) {
+ this( id, idInString, entity, document, null, batch );
+ }
+
+ public AddLuceneWork(Serializable id, String idInString, Class entity, Document document, Map<String, String> fieldToAnalyzerMap) {
+ this( id, idInString, entity, document, fieldToAnalyzerMap, false );
+ }
+
+ public AddLuceneWork(Serializable id, String idInString, Class entity, Document document, Map<String, String> fieldToAnalyzerMap, boolean batch) {
+ super( id, idInString, entity, document, batch );
+ this.fieldToAnalyzerMap = fieldToAnalyzerMap;
+ }
+
+ public Map<String, String> getFieldToAnalyzerMap() {
+ return fieldToAnalyzerMap;
+ }
+
+ @Override
+ public <T> T getWorkDelegate(final WorkVisitor<T> visitor) {
+ return visitor.getDelegate( this );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/AddLuceneWork.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/BackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/BackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/BackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.util.Properties;
+import java.util.List;
+
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Interface for different types of queue processor factories. Implementations need a no-arg constructor.
+ * The factory typically prepares or pools the resources needed by the queue processor.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface BackendQueueProcessorFactory {
+
+ /**
+ * Used at startup, called once as first method.
+ * @param props all configuration properties
+ * @param searchFactory the client
+ */
+ void initialize(Properties props, SearchFactoryImplementor searchFactory);
+
+ /**
+ * Return a runnable implementation responsible for processing the queue to a given backend.
+ *
+ * @param queue The work queue to process.
+ * @return <code>Runnable</code> which processes <code>queue</code> when started.
+ */
+ Runnable getProcessor(List<LuceneWork> queue);
+
+ /**
+ * Used to shutdown and eventually release resources.
+ * No other method should be used after this one.
+ */
+ void close();
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/BackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
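
For illustration only, not part of this revision: a minimal (and deliberately useless) factory honouring the lifecycle described above; a real backend would apply each LuceneWork to its index instead of discarding the queue.

import java.util.List;
import java.util.Properties;

import org.hibernate.search.backend.BackendQueueProcessorFactory;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.engine.SearchFactoryImplementor;

public class DiscardingBackendQueueProcessorFactory implements BackendQueueProcessorFactory {

    public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
        // called once at startup; read backend-specific properties here
    }

    public Runnable getProcessor(final List<LuceneWork> queue) {
        return new Runnable() {
            public void run() {
                // a real implementation would visit each LuceneWork in the queue here
            }
        };
    }

    public void close() {
        // release pooled resources; no other method is called afterwards
    }
}
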
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/DeleteLuceneWork.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/DeleteLuceneWork.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/DeleteLuceneWork.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DeleteLuceneWork extends LuceneWork implements Serializable {
+
+ private static final long serialVersionUID = -854604138119230246L;
+
+ public DeleteLuceneWork(Serializable id, String idInString, Class entity) {
+ super( id, idInString, entity );
+ }
+
+ @Override
+ public <T> T getWorkDelegate(final WorkVisitor<T> visitor) {
+ return visitor.getDelegate( this );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/DeleteLuceneWork.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneIndexingParameters.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneIndexingParameters.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneIndexingParameters.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,197 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexWriter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.configuration.IndexWriterSetting;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_FIELD_LENGTH;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.USE_COMPOUND_FILE;
+import org.hibernate.search.backend.configuration.MaskedProperty;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Wrapper class around the Lucene indexing parameters defined in IndexWriterSetting.
+ * <p>
+ * There are two sets of these parameters. One is for regular indexing, the other for batch indexing
+ * triggered by <code>FullTextSession.index(Object entity)</code>
+ *
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+public class LuceneIndexingParameters implements Serializable {
+
+ private static final long serialVersionUID = 5424606407623591663L;
+ private static final Logger log = LoggerFactory.make();
+
+ // value keyword
+ public static final String EXPLICIT_DEFAULT_VALUE = "default";
+ // property path keywords
+ public static final String BATCH = "batch";
+ public static final String TRANSACTION = "transaction";
+ public static final String PROP_GROUP = "indexwriter";
+
+ private final ParameterSet transactionIndexParameters;
+ private final ParameterSet batchIndexParameters;
+
+ public LuceneIndexingParameters( Properties sourceProps ) {
+ //prefer keys under "indexwriter" but fallback for backwards compatibility:
+ Properties indexingParameters = new MaskedProperty( sourceProps, PROP_GROUP, sourceProps );
+ //get keys for "transaction"
+ Properties transactionProps = new MaskedProperty( indexingParameters, TRANSACTION );
+ //get keys for "batch"
+ Properties batchProps = new MaskedProperty( indexingParameters, BATCH );
+ transactionIndexParameters = new ParameterSet( transactionProps, TRANSACTION );
+ batchIndexParameters = new ParameterSet( batchProps, BATCH );
+ doSanityChecks( transactionIndexParameters, batchIndexParameters );
+ }
+
+ private void doSanityChecks(ParameterSet transParams, ParameterSet batchParams) {
+ if ( log.isWarnEnabled() ) {
+ Integer maxFieldLengthTransaction = transParams.parameters.get( MAX_FIELD_LENGTH );
+ Integer maxFieldLengthBatch = batchParams.parameters.get( MAX_FIELD_LENGTH );
+ if ( notEquals( maxFieldLengthTransaction, maxFieldLengthBatch ) ) {
+ log.warn( "The max_field_length value configured for transaction is "
+ + "different than the value configured for batch." );
+ }
+ Integer useCompoundTransaction = transParams.parameters.get( USE_COMPOUND_FILE );
+ Integer useCompoundBatch = batchParams.parameters.get( USE_COMPOUND_FILE );
+ if ( notEquals( useCompoundTransaction, useCompoundBatch ) ) {
+ log.warn( "The IndexWriter setting \"use_compound_file\" for batch "+
+ "mode can't be different from the transaction setting." );
+ }
+ }
+ }
+
+ private boolean notEquals(Integer a, Integer b) {
+ if ( a==null && b==null ) return false;
+ if ( a==null && b!=null ) return true;
+ if ( a!=null && b==null ) return true;
+ return a.intValue() != b.intValue();
+ }
+
+ public ParameterSet getTransactionIndexParameters() {
+ return transactionIndexParameters;
+ }
+
+ public ParameterSet getBatchIndexParameters() {
+ return batchIndexParameters;
+ }
+
+ public static class ParameterSet implements Serializable {
+
+ private static final long serialVersionUID = -6121723702279869524L;
+
+ final Map<IndexWriterSetting, Integer> parameters = new EnumMap<IndexWriterSetting, Integer>(IndexWriterSetting.class);
+
+ public ParameterSet(Properties prop, String paramName) {
+ //don't iterate on property entries as we know all the keys:
+ for ( IndexWriterSetting t : IndexWriterSetting.values() ) {
+ String key = t.getKey();
+ String value = prop.getProperty( key );
+ if ( ! ( value==null || EXPLICIT_DEFAULT_VALUE.equalsIgnoreCase( value ) ) ) {
+ if ( log.isDebugEnabled() ) {
+ //TODO add DirectoryProvider name when available to log message
+ log.debug( "Set indexwriter parameter " + paramName +"." + key + " to value : "+ value );
+ }
+ parameters.put( t, t.parseVal( value ) );
+ }
+ }
+ }
+
+ /**
+ * Applies the parameters represented by this to a writer.
+ * Undefined parameters are not set, leaving the lucene default.
+ * @param writer the IndexWriter whereto the parameters will be applied.
+ */
+ public void applyToWriter(IndexWriter writer) {
+ for ( Map.Entry<IndexWriterSetting,Integer> entry : parameters.entrySet() ) {
+ try {
+ entry.getKey().applySetting( writer, entry.getValue() );
+ } catch ( IllegalArgumentException e ) {
+ //TODO if DirectoryProvider had getDirectoryName() exceptions could tell better
+ throw new SearchException( "Illegal IndexWriter setting "
+ + entry.getKey().getKey() + " "+ e.getMessage(), e );
+ }
+ }
+ }
+
+ public Integer getCurrentValueFor(IndexWriterSetting ws){
+ return parameters.get( ws );
+ }
+
+ public void setCurrentValueFor(IndexWriterSetting ws, Integer newValue){
+ if ( newValue == null ) {
+ parameters.remove( ws );
+ } else {
+ parameters.put( ws, newValue );
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + ((parameters == null) ? 0 : parameters.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if ( this == obj )
+ return true;
+ if ( obj == null )
+ return false;
+ if ( getClass() != obj.getClass() )
+ return false;
+ final ParameterSet other = (ParameterSet) obj;
+ if (parameters == null) {
+ if ( other.parameters != null )
+ return false;
+ } else if ( ! parameters.equals( other.parameters ) )
+ return false;
+ return true;
+ }
+
+ }
+
+ public void applyToWriter(IndexWriter writer, boolean batch) {
+ if ( batch ) {
+ getBatchIndexParameters().applyToWriter( writer );
+ }
+ else {
+ getTransactionIndexParameters().applyToWriter( writer );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneIndexingParameters.java
___________________________________________________________________
Name: svn:keywords
+ Id
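
For illustration only, not part of this revision: a sketch of how the two parameter sets are fed and applied. The property keys follow the indexwriter.<transaction|batch>.<setting> scheme handled above; max_field_length is one of the IndexWriterSetting keys, and the "default" keyword restores the Lucene default.

import java.util.Properties;

import org.apache.lucene.index.IndexWriter;
import org.hibernate.search.backend.LuceneIndexingParameters;

public class IndexingParametersSketch {

    public static LuceneIndexingParameters build() {
        Properties props = new Properties();
        // transactional (per-commit) indexing
        props.setProperty( "indexwriter.transaction.max_field_length", "10000" );
        // batch indexing, e.g. FullTextSession.index(Object)
        props.setProperty( "indexwriter.batch.max_field_length", "default" );
        return new LuceneIndexingParameters( props );
    }

    public static void configure(IndexWriter writer, boolean batch) {
        // pick the batch or transaction set and push it onto the writer
        build().applyToWriter( writer, batch );
    }
}
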
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneWork.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneWork.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneWork.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,95 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * Represents a serializable Lucene unit of work
+ *
+ * WARNING: This class aims to be serializable and passed in an asynchronous way across VMs;
+ * any non-backward-compatible serialization change should be done with great care
+ * and publicly announced. Specifically, new versions of Hibernate Search should be
+ * able to handle changes produced by older versions of Hibernate Search if reasonably possible.
+ * That is why each subclass likely to be passed along carries its own serialVersionUID.
+ * NOTE: we are relying on Lucene's Document to play nice unfortunately
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+public abstract class LuceneWork implements Serializable {
+
+ private final Document document;
+ private final Class entityClass;
+ private final Serializable id;
+
+ /**
+ * Flag indicating if this lucene work has to be indexed in batch mode.
+ */
+ private final boolean batch;
+ private final String idInString;
+
+ public LuceneWork(Serializable id, String idInString, Class entity) {
+ this( id, idInString, entity, null );
+ }
+
+ public LuceneWork(Serializable id, String idInString, Class entity, Document document) {
+ this( id, idInString, entity, document, false );
+ }
+
+ public LuceneWork(Serializable id, String idInString, Class entity, Document document, boolean batch) {
+ this.id = id;
+ this.idInString = idInString;
+ this.entityClass = entity;
+ this.document = document;
+ this.batch = batch;
+ }
+
+ public boolean isBatch() {
+ return batch;
+ }
+
+ public Document getDocument() {
+ return document;
+ }
+
+ public Class getEntityClass() {
+ return entityClass;
+ }
+
+ public Serializable getId() {
+ return id;
+ }
+
+ public String getIdInString() {
+ return idInString;
+ }
+
+ public abstract <T> T getWorkDelegate(WorkVisitor<T> visitor);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/LuceneWork.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/OptimizeLuceneWork.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/OptimizeLuceneWork.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/OptimizeLuceneWork.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+/**
+ * A unit of work triggering an optimize operation.
+ * This work does not propagate to a cluster: it should be filtered before being sent to
+ * the network.
+ *
+ * @author Andrew Hahn
+ * @author Emmanuel Bernard
+ */
+public class OptimizeLuceneWork extends LuceneWork implements Serializable {
+
+ public OptimizeLuceneWork(Class entity) {
+ super( null, null, entity );
+ }
+
+ @Override
+ public <T> T getWorkDelegate(final WorkVisitor<T> visitor) {
+ return visitor.getDelegate( this );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/OptimizeLuceneWork.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/PurgeAllLuceneWork.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/PurgeAllLuceneWork.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/PurgeAllLuceneWork.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+/**
+ * A unit of work used to purge an entire index.
+ *
+ * @author John Griffin
+ */
+public class PurgeAllLuceneWork extends LuceneWork implements Serializable {
+
+ private static final long serialVersionUID = 8124091288284011715L;
+
+ public PurgeAllLuceneWork(Class entity) {
+ super( null, null, entity, null );
+ }
+
+ @Override
+ public <T> T getWorkDelegate(final WorkVisitor<T> visitor) {
+ return visitor.getDelegate( this );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/PurgeAllLuceneWork.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/QueueingProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/QueueingProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/QueueingProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+/**
+ * Piles up work operations.
+ * No thread safety has to be implemented, the queue being scoped already.
+ * The implementation must be "stateless" wrt the queue though (i.e. it must not store the queue state).
+ *
+ * FIXME this Interface does not make much sense, since the impl will not be changed
+ *
+ * @author Emmanuel Bernard
+ */
+public interface QueueingProcessor {
+ /**
+ * Add a work
+ * TODO move that somewhere else, it does not really fit here
+ */
+ void add(Work work, WorkQueue workQueue);
+
+ /**
+ * prepare resources for a later performWorks call
+ */
+ void prepareWorks(WorkQueue workQueue);
+
+ /**
+ * Execute works
+ */
+ void performWorks(WorkQueue workQueue);
+
+ /**
+ * Rollback works
+ */
+ void cancelWorks(WorkQueue workQueue);
+
+ /**
+ * clean resources
+ * This method should log errors rather than raise an exception
+ */
+ void close();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/QueueingProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/TransactionContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/TransactionContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/TransactionContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import javax.transaction.Synchronization;
+
+/**
+ * Contract needed by Hibernate Search to batch changes per transaction.
+ *
+ * @author Navin Surtani - navin(a)surtani.org
+ */
+public interface TransactionContext {
+ /**
+ * @return A boolean indicating whether a transaction is in progress or not.
+ */
+ public boolean isTransactionInProgress();
+
+ /**
+ * @return a transaction object.
+ */
+ public Object getTransactionIdentifier();
+
+ /**
+ * Register the given synchronization.
+ *
+ * @param synchronization synchronization to register
+ */
+ public void registerSynchronization(Synchronization synchronization);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/TransactionContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
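A minimal sketch of a TransactionContext implementation for code running outside of any transaction; illustrative only, assuming it lives in the same package as the interface above:

    import javax.transaction.Status;
    import javax.transaction.Synchronization;

    public class NoTransactionContext implements TransactionContext {
        public boolean isTransactionInProgress() { return false; }
        public Object getTransactionIdentifier() { return this; }
        public void registerSynchronization(Synchronization synchronization) {
            // no transaction to defer to: run the callbacks immediately
            synchronization.beforeCompletion();
            synchronization.afterCompletion( Status.STATUS_COMMITTED );
        }
    }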
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Work.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Work.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Work.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.Serializable;
+
+import org.hibernate.annotations.common.reflection.XMember;
+
+/**
+ * A unit of work. It only makes sense inside the same session, since it relies on the session scope.
+ *
+ * @author Emmanuel Bernard
+ */
+public class Work<T> {
+ private final T entity;
+ private final Class<T> entityClass;
+ private final Serializable id;
+ private final XMember idGetter;
+ private final WorkType type;
+
+ public Work(T entity, Serializable id, WorkType type) {
+ this( entity, null, id, null, type );
+ }
+
+ public Work(Class<T> entityType, Serializable id, WorkType type) {
+ this( null, entityType, id, null, type );
+ }
+
+ public Work(T entity, XMember idGetter, WorkType type) {
+ this( entity, null, null, idGetter, type );
+ }
+
+ private Work(T entity, Class<T> entityClass, Serializable id,
+ XMember idGetter, WorkType type) {
+ this.entity = entity;
+ this.entityClass = entityClass;
+ this.id = id;
+ this.idGetter = idGetter;
+ this.type = type;
+ }
+
+ public Class<T> getEntityClass() {
+ return entityClass;
+ }
+
+ public T getEntity() {
+ return entity;
+ }
+
+ public Serializable getId() {
+ return id;
+ }
+
+ public XMember getIdGetter() {
+ return idGetter;
+ }
+
+ public WorkType getType() {
+ return type;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Work.java
___________________________________________________________________
Name: svn:keywords
+ Id
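Illustrative construction of the three public flavours of Work (the entity instance, identifier and XMember getter are placeholders):

    Work<Object> add    = new Work<Object>( entity, id, WorkType.ADD );          // entity + known id
    Work<Object> purge  = new Work<Object>( Object.class, id, WorkType.PURGE );  // class + id, no instance needed
    Work<Object> update = new Work<Object>( entity, idGetter, WorkType.UPDATE ); // id resolved later through the XMember getter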
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkQueue.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkQueue.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkQueue.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class WorkQueue {
+ private List<Work> queue;
+
+ private List<LuceneWork> sealedQueue;
+
+ public WorkQueue(int size) {
+ queue = new ArrayList<Work>(size);
+ }
+
+ private WorkQueue(List<Work> queue) {
+ this.queue = queue;
+ }
+
+ public WorkQueue() {
+ this(10);
+ }
+
+
+ public void add(Work work) {
+ queue.add(work);
+ }
+
+
+ public List<Work> getQueue() {
+ return queue;
+ }
+
+ public WorkQueue splitQueue() {
+ WorkQueue subQueue = new WorkQueue( queue );
+ this.queue = new ArrayList<Work>( queue.size() );
+ return subQueue;
+ }
+
+
+ public List<LuceneWork> getSealedQueue() {
+ if (sealedQueue == null) throw new AssertionFailure("Accessed the sealed queue of a WorkQueue which has not been sealed");
+ return sealedQueue;
+ }
+
+ public void setSealedQueue(List<LuceneWork> sealedQueue) {
+ //invalidate the working queue for serializability
+ queue = Collections.emptyList();
+ this.sealedQueue = sealedQueue;
+ }
+
+ public void clear() {
+ queue.clear();
+ if (sealedQueue != null) sealedQueue.clear();
+ }
+
+ public int size() {
+ return queue.size();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkQueue.java
___________________________________________________________________
Name: svn:keywords
+ Id
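A sketch of the intended batching behaviour; someWork, batchSize and buildLuceneWorks are placeholders for a previously built Work, a configured threshold and the translation step performed by the queueing processor:

    WorkQueue queue = new WorkQueue( 100 );
    queue.add( someWork );
    if ( queue.size() >= batchSize ) {
        WorkQueue batch = queue.splitQueue();        // pending works move into the sub-queue
        List<LuceneWork> luceneWorks = buildLuceneWorks( batch.getQueue() ); // hypothetical translation
        batch.setSealedQueue( luceneWorks );         // from now on only getSealedQueue() is meaningful
    }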
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkType.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkType.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkType.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+/**
+ * Enumeration of the different types of Lucene work. This enumeration is used to specify the type
+ * of index operation to be executed.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ */
+public enum WorkType {
+ ADD(true),
+ UPDATE(true),
+ DELETE(false),
+ COLLECTION(true),
+ /**
+ * Used to remove a specific instance
+ * of a class from an index.
+ */
+ PURGE(false),
+ /**
+ * Used to remove all instances of a
+ * class from an index.
+ */
+ PURGE_ALL(false),
+
+ /**
+ * This type is used for batch indexing.
+ */
+ INDEX(true);
+
+ private final boolean searchForContainers;
+
+ private WorkType(boolean searchForContainers) {
+ this.searchForContainers = searchForContainers;
+ }
+
+ /**
+ * When a reference is changed, either to null or to another instance, we expect dirty checking to be triggered (both sides
+ * have to be updated).
+ * When the contained object itself is changed, we apply the {Add|Update}Work on the containedIn entities.
+ */
+ public boolean searchForContainers() {
+ return this.searchForContainers;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkType.java
___________________________________________________________________
Name: svn:keywords
+ Id
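The searchForContainers flag is meant to be consumed roughly like this (sketch; the work instance is a placeholder):

    WorkType type = work.getType();
    if ( type.searchForContainers() ) {
        // also walk the @ContainedIn associations and queue an update for the owning entities
    }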
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkVisitor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkVisitor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkVisitor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+/**
+ * A visitor delegate that needs to manipulate a LuceneWork
+ * must implement this interface.
+ * This pattern enables any implementation to virtually add delegate
+ * methods to the base LuceneWork hierarchy without having to change it.
+ * This contract however breaks if more subclasses of LuceneWork
+ * are created, as a visitor must support all existing types.
+ *
+ * @author Sanne Grinovero
+ *
+ * @param <T> used to force a return type of choice.
+ */
+public interface WorkVisitor<T> {
+
+ T getDelegate(AddLuceneWork addLuceneWork);
+ T getDelegate(DeleteLuceneWork deleteLuceneWork);
+ T getDelegate(OptimizeLuceneWork optimizeLuceneWork);
+ T getDelegate(PurgeAllLuceneWork purgeAllLuceneWork);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkVisitor.java
___________________________________________________________________
Name: svn:keywords
+ Id
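A sketch of a WorkVisitor implementation returning a label per work type; illustrative only, and the dispatch entry point on LuceneWork is left as a comment since that class is not part of this hunk:

    public class WorkLabelVisitor implements WorkVisitor<String> {
        public String getDelegate(AddLuceneWork addLuceneWork)           { return "add"; }
        public String getDelegate(DeleteLuceneWork deleteLuceneWork)     { return "delete"; }
        public String getDelegate(OptimizeLuceneWork optimizeLuceneWork) { return "optimize"; }
        public String getDelegate(PurgeAllLuceneWork purgeAllLuceneWork) { return "purgeAll"; }
    }
    // a LuceneWork instance then picks the right overload through its visitor accept method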
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Worker.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Worker.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Worker.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,55 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.util.Properties;
+
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Performs work for a given session. Implementations have to be thread-safe.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface Worker {
+ //Use of EventSource since it's the common subinterface of Session and SessionImplementor;
+ //the alternative would have been to downcast or to pass 2 parameters :(
+ void performWork(Work work, TransactionContext transactionContext);
+
+ void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor);
+
+ /**
+ * Cleans up resources.
+ * This method can throw exceptions.
+ */
+ void close();
+
+ /**
+ * Flush any work queue.
+ *
+ * @param transactionContext the current transaction (context).
+ */
+ void flushWorks(TransactionContext transactionContext);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Worker.java
___________________________________________________________________
Name: svn:keywords
+ Id
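Callers coming from the event layer are expected to use the Worker roughly like this (sketch; the work and context instances are built elsewhere):

    void onEntityChange(Worker worker, Work work, TransactionContext context) {
        worker.performWork( work, context );  // queued against the current transaction
        // worker.flushWorks( context );      // force pending works out without waiting for commit
    }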
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkerFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkerFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkerFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.backend.impl.TransactionalWorker;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.PluginLoader;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class WorkerFactory {
+
+ private static Properties getProperties(SearchConfiguration cfg) {
+ Properties props = cfg.getProperties();
+ Properties workerProperties = new Properties();
+ for (Map.Entry entry : props.entrySet()) {
+ String key = (String) entry.getKey();
+ if ( key.startsWith( Environment.WORKER_PREFIX ) ) {
+ //key.substring( Environment.WORKER_PREFIX.length() )
+ workerProperties.setProperty( key, (String) entry.getValue() );
+ }
+ }
+ return workerProperties;
+ }
+
+ public static Worker createWorker(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor) {
+ Properties props = getProperties( cfg );
+ String impl = props.getProperty( Environment.WORKER_SCOPE );
+ Worker worker;
+ if ( StringHelper.isEmpty( impl ) ) {
+ worker = new TransactionalWorker();
+ }
+ else if ( "transaction".equalsIgnoreCase( impl ) ) {
+ worker = new TransactionalWorker();
+ }
+ else {
+ worker = PluginLoader.instanceFromName( Worker.class,
+ impl, WorkerFactory.class, "worker" );
+ }
+ worker.initialize( props, searchFactoryImplementor );
+ return worker;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/WorkerFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
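Worker selection is driven by the worker scope property; a sketch assuming the configuration names a custom implementation (com.acme.MyWorker is purely hypothetical):

    // cfg.getProperties() contains: Environment.WORKER_SCOPE -> "com.acme.MyWorker"
    Worker worker = WorkerFactory.createWorker( cfg, searchFactoryImplementor );
    // an empty value or "transaction" would have produced a TransactionalWorker instead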
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Workspace.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Workspace.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,217 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend;
+
+import java.io.IOException;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.search.Similarity;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.optimization.OptimizerStrategy;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Lucene workspace for a DirectoryProvider.<p/>
+ * Before using {@link #getIndexWriter} the lock must be acquired,
+ * and resources must be closed before releasing the lock.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+//TODO renaming to "DirectoryWorkspace" would be nice.
+public class Workspace {
+
+ private static final Logger log = LoggerFactory.make();
+ private static final Analyzer SIMPLE_ANALYZER = new SimpleAnalyzer();
+ private static final IndexWriter.MaxFieldLength maxFieldLength =
+ new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+
+ // invariant state:
+
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final DirectoryProvider<?> directoryProvider;
+ private final OptimizerStrategy optimizerStrategy;
+ private final ReentrantLock lock;
+ private final Set<Class<?>> entitiesInDirectory;
+ private final LuceneIndexingParameters indexingParams;
+ private final Similarity similarity;
+
+ // variable state:
+
+ /**
+ * Current open IndexWriter, or null when closed. Guarded by synchronization.
+ */
+ private IndexWriter writer;
+
+ /**
+ * Keeps a count of modification operations done on the index.
+ */
+ private final AtomicLong operations = new AtomicLong( 0L );
+
+ public Workspace(SearchFactoryImplementor searchFactoryImplementor, DirectoryProvider<?> provider) {
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.directoryProvider = provider;
+ this.optimizerStrategy = searchFactoryImplementor.getOptimizerStrategy( directoryProvider );
+ this.entitiesInDirectory = searchFactoryImplementor.getClassesInDirectoryProvider( provider );
+ this.indexingParams = searchFactoryImplementor.getIndexingParameters( directoryProvider );
+ this.lock = searchFactoryImplementor.getDirectoryProviderLock( provider );
+ this.similarity = searchFactoryImplementor.getSimilarity( directoryProvider );
+ }
+
+ public <T> DocumentBuilderIndexedEntity<T> getDocumentBuilder(Class<T> entity) {
+ return searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity );
+ }
+
+ public Analyzer getAnalyzer(String name) {
+ return searchFactoryImplementor.getAnalyzer( name );
+ }
+
+ /**
+ * If optimization has not been forced, give the configured OptimizerStrategy
+ * a chance to optimize the index.
+ */
+ public void optimizerPhase() {
+ lock.lock();
+ try {
+ // used getAndSet(0) because Workspace is going to be reused by next transaction.
+ synchronized ( optimizerStrategy ) {
+ optimizerStrategy.addTransaction( operations.getAndSet( 0L ) );
+ optimizerStrategy.optimize( this );
+ }
+ }
+ finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Used by OptimizeLuceneWork after index optimization to flag that
+ * optimization has been forced.
+ * @see OptimizeLuceneWork
+ * @see SearchFactory#optimize()
+ * @see SearchFactory#optimize(Class)
+ */
+ public void optimize() {
+ lock.lock();
+ try {
+ //Needs to ensure the optimizerStrategy is accessed in a threadsafe way
+ synchronized ( optimizerStrategy ) {
+ optimizerStrategy.optimizationForced();
+ }
+ }
+ finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Gets the IndexWriter, opening one if needed.
+ * @param batchmode when true, the IndexWriter settings for batch mode will be applied.
+ * Ignored if the IndexWriter is already open.
+ * @throws SearchException on an IOException during index opening.
+ * @return a new IndexWriter or one already open.
+ */
+ public synchronized IndexWriter getIndexWriter(boolean batchmode) {
+ if ( writer != null )
+ return writer;
+ try {
+ writer = new IndexWriter( directoryProvider.getDirectory(), SIMPLE_ANALYZER, false, maxFieldLength ); // has been created at init time
+ indexingParams.applyToWriter( writer, batchmode );
+ writer.setSimilarity( similarity );
+ log.trace( "IndexWriter opened" );
+ }
+ catch ( IOException e ) {
+ writer = null;
+ throw new SearchException( "Unable to open IndexWriter", e );
+ }
+ return writer;
+ }
+
+ /**
+ * Commits changes to a previously opened IndexWriter.
+ *
+ * @throws SearchException on an IOException during the Lucene commit operation.
+ * Does nothing if there is no open IndexWriter.
+ */
+ public synchronized void commitIndexWriter() {
+ if ( writer != null ) {
+ try {
+ writer.commit();
+ log.trace( "Index changes commited." );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Exception while commiting index changes", e );
+ }
+ }
+ }
+
+ /**
+ * Closes a previously opened IndexWriter.
+ * @throws SearchException on IOException during Lucene close operation
+ */
+ public synchronized void closeIndexWriter() {
+ IndexWriter toClose = writer;
+ writer = null;
+ if ( toClose != null ) {
+ try {
+ toClose.close();
+ log.trace( "IndexWriter closed" );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Exception while closing IndexWriter", e );
+ }
+ }
+ }
+
+ /**
+ * Increment the counter of modification operations done on the index.
+ * Used (currently only) by the OptimizerStrategy.
+ * @param modCount the increment to add to the counter.
+ */
+ public void incrementModificationCounter(int modCount) {
+ operations.addAndGet( modCount );
+ }
+
+ /**
+ * @return The unmodifiable set of entity types being indexed
+ * in the underlying Lucene Directory backing this Workspace.
+ */
+ public Set<Class<?>> getEntitiesInDirectory() {
+ return entitiesInDirectory;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/Workspace.java
___________________________________________________________________
Name: svn:keywords
+ Id
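The locking contract described in the Workspace javadoc translates to roughly this calling pattern (sketch; the workspace, provider and factory references come from the backend wiring):

    java.util.concurrent.locks.ReentrantLock lock =
            searchFactoryImplementor.getDirectoryProviderLock( directoryProvider );
    lock.lock();
    try {
        IndexWriter writer = workspace.getIndexWriter( false );
        // ... addDocument / deleteDocuments calls ...
        workspace.commitIndexWriter();
    }
    finally {
        workspace.closeIndexWriter();
        lock.unlock();
    }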
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/ConfigurationParseHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/ConfigurationParseHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/ConfigurationParseHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,116 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.configuration;
+
+import java.util.Properties;
+
+import org.hibernate.util.StringHelper;
+import org.hibernate.search.SearchException;
+
+/**
+ * Helper class to avoid managing NumberFormatException and similar code
+ * and ensure consistent error messages across Configuration parsing problems.
+ *
+ * @author Sanne Grinovero
+ */
+public abstract class ConfigurationParseHelper {
+
+ /**
+ * Parses a String to get an int value.
+ * @param value A string containing an int value to parse
+ * @param errorMsgOnParseFailure message being wrapped in a SearchException if value is null or not correct.
+ * @return the parsed value
+ * @throws SearchException both for null values and for Strings not containing a valid int.
+ */
+ public static final int parseInt(String value, String errorMsgOnParseFailure) {
+ if ( value == null ) {
+ throw new SearchException( errorMsgOnParseFailure );
+ }
+ else {
+ try {
+ return Integer.parseInt( value.trim() );
+ } catch (NumberFormatException nfe) {
+ throw new SearchException( errorMsgOnParseFailure, nfe );
+ }
+ }
+ }
+
+ /**
+ * In case value is null or an empty string, the defValue is returned.
+ * @param value
+ * @param defValue
+ * @param errorMsgOnParseFailure
+ * @return the converted int.
+ * @throws SearchException if value can't be parsed.
+ */
+ public static final int parseInt(String value, int defValue, String errorMsgOnParseFailure) {
+ if ( StringHelper.isEmpty( value ) ) {
+ return defValue;
+ }
+ else {
+ return parseInt( value, errorMsgOnParseFailure );
+ }
+ }
+
+ /**
+ * Looks for a numeric value in the Properties, returning
+ * defValue if not found or if an empty string is found.
+ * When the value is found but is not in a valid format,
+ * a standard error message is generated.
+ * @param cfg
+ * @param key
+ * @param defValue
+ * @return the converted int.
+ * @throws SearchException for invalid format.
+ */
+ public static final int getIntValue(Properties cfg, String key, int defValue) {
+ String propValue = cfg.getProperty( key );
+ return parseInt( propValue, defValue, "Unable to parse " + key + ": " + propValue );
+ }
+
+ /**
+ * Parses a string to recognize exactly either "true" or "false".
+ * @param value the string to be parsed
+ * @param errorMsgOnParseFailure the message to be put in the exception if thrown
+ * @return true if value is "true", false if value is "false"
+ * @throws SearchException for invalid format or values.
+ */
+ public static final boolean parseBoolean(String value, String errorMsgOnParseFailure) {
+ // avoiding Boolean.valueOf() to have more checks: makes it easy to spot wrong type in cfg.
+ if ( value == null ) {
+ throw new SearchException( errorMsgOnParseFailure );
+ }
+ else if ( "false".equalsIgnoreCase( value.trim() ) ) {
+ return false;
+ }
+ else if ( "true".equalsIgnoreCase( value.trim() ) ) {
+ return true;
+ }
+ else {
+ throw new SearchException( errorMsgOnParseFailure );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/ConfigurationParseHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
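Typical use of the parsing helpers (the property key, value and messages are placeholders):

    Properties cfg = new Properties();
    cfg.setProperty( "hibernate.search.worker.batch_size", "20" );
    int batchSize   = ConfigurationParseHelper.getIntValue( cfg, "hibernate.search.worker.batch_size", 0 );
    boolean enabled = ConfigurationParseHelper.parseBoolean( "true", "Invalid boolean value" );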
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,151 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.configuration;
+
+import java.io.Serializable;
+
+import org.apache.lucene.index.IndexWriter;
+import org.hibernate.search.SearchException;
+
+/**
+ * Represents possible options to be applied to an
+ * <code>org.apache.lucene.index.IndexWriter</code>
+ *
+ * @author Sanne Grinovero
+ */
+public enum IndexWriterSetting implements Serializable {
+
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setMaxBufferedDeleteTerms(int)
+ */
+ MAX_BUFFERED_DELETE_TERMS( "max_buffered_delete_terms" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxBufferedDeleteTerms( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setMaxBufferedDocs(int)
+ */
+ MAX_BUFFERED_DOCS( "max_buffered_docs" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxBufferedDocs( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setMaxFieldLength(int)
+ */
+ MAX_FIELD_LENGTH( "max_field_length" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxFieldLength( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setMaxMergeDocs(int)
+ */
+ MAX_MERGE_DOCS( "max_merge_docs" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxMergeDocs( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setMergeFactor(int)
+ */
+ MERGE_FACTOR( "merge_factor" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMergeFactor( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setRAMBufferSizeMB(double)
+ */
+ RAM_BUFFER_SIZE( "ram_buffer_size" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setRAMBufferSizeMB( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setTermIndexInterval(int)
+ */
+ TERM_INDEX_INTERVAL( "term_index_interval" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setTermIndexInterval( value );
+ }
+ } ,
+ /**
+ * @see org.apache.lucene.index.IndexWriter#setUseCompoundFile(boolean)
+ */
+ USE_COMPOUND_FILE( "use_compound_file" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setUseCompoundFile( intToBoolean( value ) );
+ }
+ @Override
+ public Integer parseVal(String value) {
+ return USE_COMPOUND_FILE.parseBoolean( value );
+ }
+ };
+
+ private static final Integer TRUE = Integer.valueOf( 1 );
+ private static final Integer FALSE = Integer.valueOf( 0 );
+
+ private final String cfgKey;
+
+ IndexWriterSetting(String configurationKey) {
+ this.cfgKey = configurationKey;
+ }
+
+ /**
+ * @throws IllegalArgumentException when user selects an invalid value; should be wrapped.
+ */
+ public abstract void applySetting(IndexWriter writer, int value);
+
+ /**
+ * @return The key used in configuration files to select an option.
+ */
+ public String getKey() {
+ return cfgKey;
+ }
+
+ /**
+ * Specific settings may override this method to provide support for additional keywords.
+ * @param value the string value as in configuration file
+ * @return the integer value going to be set as parameter
+ * @throws SearchException for unrecognized values
+ */
+ public Integer parseVal(String value) {
+ return ConfigurationParseHelper.parseInt( value,
+ "Invalid value for " + cfgKey + ": " + value );
+ }
+
+ private Integer parseBoolean(String value) {
+ boolean v = ConfigurationParseHelper.parseBoolean( value,
+ "Invalid value for " + cfgKey + ": " + value );
+ return v ? TRUE : FALSE;
+ }
+
+ private static boolean intToBoolean(int value) {
+ return value == TRUE.intValue();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java
___________________________________________________________________
Name: svn:keywords
+ Id
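Given an already opened IndexWriter named writer, a setting is applied like this (sketch):

    IndexWriterSetting setting = IndexWriterSetting.MAX_BUFFERED_DOCS; // configuration key "max_buffered_docs"
    Integer value = setting.parseVal( "64" );   // throws SearchException on unparseable input
    setting.applySetting( writer, value );      // delegates to writer.setMaxBufferedDocs( 64 )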
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/MaskedProperty.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/MaskedProperty.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/MaskedProperty.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,398 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.configuration;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.InvalidPropertiesFormatException;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.slf4j.Logger;
+
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * A wrapper to Properties, to restrict the availability of
+ * values to only those which have a key beginning with some
+ * masking String.
+ * Supported methods to enumerate the list of properties are:
+ * - propertyNames()
+ * - keySet()
+ * - keys()
+ * Other methods, including those returning entries and values, are not supported.
+ *
+ * @author Sanne Grinovero
+ * @author Emmanuel Bernard
+ */
+public class MaskedProperty extends Properties implements Serializable {
+
+ private static final long serialVersionUID = -593307257383085113L;
+ private static final Logger log = LoggerFactory.make();
+
+ private final Properties masked;
+ private final Properties fallBack;
+ private final String radix;
+ private transient Set<Object> propertyNames;
+
+ /**
+ * Provides a view to the provided Properties hiding
+ * all keys not starting with some [mask.].
+ * @param propsToMask the Properties containing the values.
+ * @param mask
+ */
+ public MaskedProperty(Properties propsToMask, String mask) {
+ this( propsToMask, mask, null );
+ }
+
+ /**
+ * Provides a view to the provided Properties hiding
+ * all keys not starting with some [mask.].
+ * If no value is found then a value is returned from propsFallBack,
+ * without masking.
+ * @param propsToMask
+ * @param mask
+ * @param propsFallBack
+ */
+ public MaskedProperty(Properties propsToMask, String mask, Properties propsFallBack) {
+ if ( propsToMask==null || mask==null ) {
+ throw new java.lang.IllegalArgumentException();
+ }
+ this.masked = propsToMask;
+ this.radix = mask + ".";
+ this.fallBack = propsFallBack;
+ }
+
+ @Override
+ public String getProperty(String key) {
+ String compositeKey = radix + key;
+ String value = masked.getProperty( compositeKey );
+ if ( value != null) {
+ log.trace( "found a match for key: [{}] value: {}", compositeKey, value );
+ return value;
+ }
+ else if ( fallBack != null ) {
+ return fallBack.getProperty( key );
+ }
+ else {
+ return null;
+ }
+ }
+
+ /**
+ * @throws IllegalArgumentException if the key is not a String instance
+ */
+ @Override
+ public synchronized boolean containsKey(Object key) {
+ if ( ! ( key instanceof String ) ) {
+ throw new IllegalArgumentException( "key must be a String" );
+ }
+ return getProperty( key.toString() ) != null;
+ }
+
+ @Override
+ public String getProperty(String key, String defaultValue) {
+ String val = getProperty( key );
+ return ( val == null ) ? defaultValue : val;
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void list(PrintStream out) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void list(PrintWriter out) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void load(InputStream inStream) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void loadFromXML(InputStream in) throws IOException,
+ InvalidPropertiesFormatException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Enumeration<?> propertyNames() {
+ initPropertyNames();
+ return Collections.enumeration( propertyNames );
+ }
+
+ private synchronized void initPropertyNames() {
+ if ( propertyNames != null) return;
+ Set<Object> maskedProperties = new TreeSet<Object>();
+ //we use keys to be safe and avoid CCE for non String key entries
+ Enumeration<?> maskedNames = masked.propertyNames();
+ while ( maskedNames.hasMoreElements() ) {
+ Object key = maskedNames.nextElement();
+ if ( String.class.isInstance( key ) ) {
+ String maskedProperty = (String) key;
+ if ( maskedProperty.startsWith( radix ) ) {
+ maskedProperties.add(maskedProperty.substring( radix.length(), maskedProperty.length() ) );
+ }
+ }
+ }
+ if ( fallBack != null ) {
+ Enumeration<?> fallBackNames = fallBack.propertyNames();
+ while ( fallBackNames.hasMoreElements() ) {
+ Object key = fallBackNames.nextElement();
+ if ( String.class.isInstance( key ) ) {
+ maskedProperties.add( key );
+ }
+ }
+ }
+ propertyNames = Collections.unmodifiableSet( maskedProperties );
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void save(OutputStream out, String comments) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Object setProperty(String key, String value) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void store(OutputStream out, String comments)
+ throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void storeToXML(OutputStream os, String comment,
+ String encoding) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void storeToXML(OutputStream os, String comment)
+ throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void clear() {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Object clone() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public synchronized boolean contains(Object value) {
+ initPropertyNames();
+ return propertyNames.contains( value );
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public boolean containsValue(Object value) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Enumeration<Object> elements() {
+ //TODO
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Set<java.util.Map.Entry<Object, Object>> entrySet() {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Object get(Object key) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public synchronized boolean isEmpty() {
+ initPropertyNames();
+ return propertyNames.isEmpty();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public synchronized Enumeration<Object> keys() {
+ initPropertyNames();
+ return Collections.enumeration( propertyNames );
+ }
+
+ @Override
+ public Set<Object> keySet() {
+ initPropertyNames();
+ return propertyNames;
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Object put(Object key, Object value) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public void putAll(Map<? extends Object, ? extends Object> t) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ protected void rehash() {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Object remove(Object key) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public synchronized int size() {
+ initPropertyNames();
+ return propertyNames.size();
+ }
+
+ @Override
+ public synchronized String toString() {
+ return masked.toString();
+ }
+
+ /**
+ * @throws UnsupportedOperationException
+ */
+ @Override
+ public Collection<Object> values() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public synchronized int hashCode() {
+ final int prime = 31;
+ int result = ( ( fallBack == null ) ? 0 : fallBack.hashCode() );
+ result = prime * result + masked.hashCode();
+ result = prime * result + radix.hashCode();
+ return result;
+ }
+
+ @Override
+ public synchronized boolean equals(Object obj) {
+ if ( this == obj )
+ return true;
+ if ( obj == null )
+ return false;
+ if ( getClass() != obj.getClass() )
+ return false;
+ final MaskedProperty other = (MaskedProperty) obj;
+ if ( fallBack == null ) {
+ if ( other.fallBack != null )
+ return false;
+ } else if ( ! fallBack.equals( other.fallBack ) )
+ return false;
+ if ( ! masked.equals( other.masked ) )
+ return false;
+ if ( ! radix.equals( other.radix ) )
+ return false;
+ return true;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/configuration/MaskedProperty.java
___________________________________________________________________
Name: svn:keywords
+ Id
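Masking behaviour in a nutshell (keys and values are placeholders):

    Properties props = new Properties();
    props.setProperty( "hibernate.search.default.indexwriter.ram_buffer_size", "64" );
    Properties masked = new MaskedProperty( props, "hibernate.search.default.indexwriter" );
    masked.getProperty( "ram_buffer_size" );  // returns "64"
    masked.getProperty( "other.key" );        // returns null, no fallback Properties configured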
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,231 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.slf4j.Logger;
+
+import org.hibernate.Hibernate;
+import org.hibernate.util.StringHelper;
+import org.hibernate.search.Environment;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.QueueingProcessor;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.search.backend.impl.blackhole.BlackHoleBackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.jgroups.MasterJGroupsBackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.jgroups.SlaveJGroupsBackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.jms.JMSBackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory;
+import org.hibernate.search.batchindexing.Executors;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.engine.DocumentBuilderContainedEntity;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.PluginLoader;
+
+/**
+ * Batch work until {@link #performWorks} is called.
+ * The work is then executed synchronously or asynchronously.
+ *
+ * @author Emmanuel Bernard
+ */
+public class BatchedQueueingProcessor implements QueueingProcessor {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final boolean sync;
+ private final int batchSize;
+ private final ExecutorService executorService;
+ private final BackendQueueProcessorFactory backendQueueProcessorFactory;
+ private final SearchFactoryImplementor searchFactoryImplementor;
+
+ public BatchedQueueingProcessor(SearchFactoryImplementor searchFactoryImplementor, Properties properties) {
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.sync = isConfiguredAsSync( properties );
+
+ //default to a simple asynchronous operation
+ int threadPoolSize = ConfigurationParseHelper.getIntValue( properties, Environment.WORKER_THREADPOOL_SIZE, 1 );
+ //no queue limit
+ int queueSize = ConfigurationParseHelper.getIntValue(
+ properties, Environment.WORKER_WORKQUEUE_SIZE, Integer.MAX_VALUE
+ );
+
+ batchSize = ConfigurationParseHelper.getIntValue( properties, Environment.WORKER_BATCHSIZE, 0 );
+
+ if ( !sync ) {
+ /**
+ * If the queue limit is reached, the operation is executed by the main thread
+ */
+ executorService = Executors.newFixedThreadPool( threadPoolSize, "backend queueing processor", queueSize );
+ }
+ else {
+ executorService = null;
+ }
+ String backend = properties.getProperty( Environment.WORKER_BACKEND );
+ if ( StringHelper.isEmpty( backend ) || "lucene".equalsIgnoreCase( backend ) ) {
+ backendQueueProcessorFactory = new LuceneBackendQueueProcessorFactory();
+ }
+ else if ( "jms".equalsIgnoreCase( backend ) ) {
+ backendQueueProcessorFactory = new JMSBackendQueueProcessorFactory();
+ }
+ else if ( "blackhole".equalsIgnoreCase( backend ) ) {
+ backendQueueProcessorFactory = new BlackHoleBackendQueueProcessorFactory();
+ }
+ else if ( "jgroupsMaster".equals( backend ) ) {
+ backendQueueProcessorFactory = new MasterJGroupsBackendQueueProcessorFactory();
+ }
+ else if ( "jgroupsSlave".equals( backend ) ) {
+ backendQueueProcessorFactory = new SlaveJGroupsBackendQueueProcessorFactory();
+ }
+ else {
+ backendQueueProcessorFactory = PluginLoader.instanceFromName( BackendQueueProcessorFactory.class,
+ backend, BatchedQueueingProcessor.class, "processor" );
+ }
+ backendQueueProcessorFactory.initialize( properties, searchFactoryImplementor );
+ searchFactoryImplementor.setBackendQueueProcessorFactory( backendQueueProcessorFactory );
+ }
+
+ public void add(Work work, WorkQueue workQueue) {
+ //don't check for the builder, it's done in prepareWorks
+ //FIXME WorkType.COLLECTION does not play well with batchSize
+ workQueue.add( work );
+ if ( batchSize > 0 && workQueue.size() >= batchSize ) {
+ WorkQueue subQueue = workQueue.splitQueue();
+ prepareWorks( subQueue );
+ performWorks( subQueue );
+ }
+ }
+
+ public void prepareWorks(WorkQueue workQueue) {
+ List<Work> queue = workQueue.getQueue();
+ int initialSize = queue.size();
+ List<LuceneWork> luceneQueue = new ArrayList<LuceneWork>( initialSize ); //TODO load factor for containedIn
+ /**
+ * Collection work types are processed second, so if the owner entity has already been processed for whatever reason
+ * the work will be ignored.
+ * However, if the owner entity has not been processed, an "UPDATE" work is executed.
+ *
+ * Processing collection works last is mandatory to avoid reindexing an object that is about to be deleted.
+ */
+ processWorkByLayer( queue, initialSize, luceneQueue, Layer.FIRST );
+ processWorkByLayer( queue, initialSize, luceneQueue, Layer.SECOND );
+ workQueue.setSealedQueue( luceneQueue );
+ }
+
+ private <T> void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
+ for ( int i = 0; i < initialSize; i++ ) {
+ @SuppressWarnings("unchecked")
+ Work<T> work = queue.get( i );
+ if ( work != null ) {
+ if ( layer.isRightLayer( work.getType() ) ) {
+ queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
+ addWorkToBuilderQueue( luceneQueue, work );
+ }
+ }
+ }
+ }
+
+ private <T> void addWorkToBuilderQueue(List<LuceneWork> luceneQueue, Work<T> work) {
+ @SuppressWarnings("unchecked")
+ Class<T> entityClass = work.getEntityClass() != null ?
+ work.getEntityClass() :
+ Hibernate.getClass( work.getEntity() );
+ DocumentBuilderIndexedEntity<T> entityBuilder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityClass );
+ if ( entityBuilder != null ) {
+ entityBuilder.addWorkToQueue(
+ entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
+ );
+ return;
+ }
+
+ //might be an entity contained in
+ DocumentBuilderContainedEntity<T> containedInBuilder = searchFactoryImplementor.getDocumentBuilderContainedEntity( entityClass );
+ if ( containedInBuilder != null ) {
+ containedInBuilder.addWorkToQueue(
+ entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
+ );
+ }
+ }
+
+ public void performWorks(WorkQueue workQueue) {
+ Runnable processor = backendQueueProcessorFactory.getProcessor( workQueue.getSealedQueue() );
+ if ( sync ) {
+ processor.run();
+ }
+ else {
+ executorService.execute( processor );
+ }
+ }
+
+ public void cancelWorks(WorkQueue workQueue) {
+ workQueue.clear();
+ }
+
+ public void close() {
+ //gracefully stop
+ if ( executorService != null && !executorService.isShutdown() ) {
+ executorService.shutdown();
+ try {
+ executorService.awaitTermination( Long.MAX_VALUE, TimeUnit.SECONDS );
+ }
+ catch ( InterruptedException e ) {
+ log.error( "Unable to properly shut down asynchronous indexing work", e );
+ }
+ }
+ //and stop the backend
+ backendQueueProcessorFactory.close();
+ }
+
+ private static enum Layer {
+ FIRST,
+ SECOND;
+
+ public boolean isRightLayer(WorkType type) {
+ if ( this == FIRST && type != WorkType.COLLECTION ) {
+ return true;
+ }
+ return this == SECOND && type == WorkType.COLLECTION;
+ }
+ }
+
+ /**
+ * @param properties the configuration to parse
+ * @return true if the configuration uses sync indexing
+ */
+ public static boolean isConfiguredAsSync(Properties properties){
+ //default to sync if none defined
+ return !"async".equalsIgnoreCase( properties.getProperty( Environment.WORKER_EXECUTION ) );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
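The BatchedQueueingProcessor constructor reads its tuning from the worker properties; a sketch of an asynchronous setup (values are hypothetical, the keys are the Environment constants referenced above):

    Properties props = new Properties();
    props.setProperty( Environment.WORKER_EXECUTION, "async" );    // default is synchronous
    props.setProperty( Environment.WORKER_THREADPOOL_SIZE, "4" );  // async thread pool size
    props.setProperty( Environment.WORKER_BATCHSIZE, "50" );       // split and flush the queue every 50 works
    props.setProperty( Environment.WORKER_BACKEND, "lucene" );     // or jms, blackhole, jgroupsMaster, jgroupsSlave
    QueueingProcessor processor = new BatchedQueueingProcessor( searchFactoryImplementor, props );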
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/EventSourceTransactionContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/EventSourceTransactionContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/EventSourceTransactionContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,126 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl;
+
+import java.io.Serializable;
+
+import javax.transaction.Synchronization;
+
+import org.hibernate.Transaction;
+import org.hibernate.event.EventSource;
+import org.hibernate.event.FlushEventListener;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.TransactionContext;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Implementation of the transactional context on top of an EventSource (Session)
+ *
+ * @author Navin Surtani - navin(a)surtani.org
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class EventSourceTransactionContext implements TransactionContext, Serializable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final EventSource eventSource;
+
+ //this transient is required to break recursive serialization
+ private transient FullTextIndexEventListener flushListener;
+
+ //constructor time is too early to determine the value of realTxInProgress,
+ //so it is evaluated lazily; otherwise the sequence
+ // " openSession - beginTransaction "
+ //would behave as "out of transaction" for the whole session lifespan.
+ private Boolean realTxInProgress = null;
+
+ public EventSourceTransactionContext(EventSource eventSource) {
+ this.eventSource = eventSource;
+ this.flushListener = getIndexWorkFlushEventListener();
+ }
+
+ public Object getTransactionIdentifier() {
+ if ( isRealTransactionInProgress() ) {
+ return eventSource.getTransaction();
+ }
+ else {
+ return eventSource;
+ }
+ }
+
+ public void registerSynchronization(Synchronization synchronization) {
+ if ( isRealTransactionInProgress() ) {
+ Transaction transaction = eventSource.getTransaction();
+ transaction.registerSynchronization( synchronization );
+ }
+ else {
+ //registerSynchronization is only called if isRealTransactionInProgress or if
+ // a flushListener was found; still we might need to find the listener again
+ // as it might have been cleared by serialization (is transient).
+ flushListener = getIndexWorkFlushEventListener();
+ if ( flushListener != null ) {
+ flushListener.addSynchronization( eventSource, synchronization );
+ }
+ else {
+ //shouldn't happen if the code about serialization is fine:
+ throw new SearchException( "AssertionFailure: flushListener not registered any more.");
+ }
+ }
+ }
+
+ private FullTextIndexEventListener getIndexWorkFlushEventListener() {
+ if ( this.flushListener != null) {
+ //for the "transient" case: might have been nullified.
+ return flushListener;
+ }
+ FlushEventListener[] flushEventListeners = eventSource.getListeners().getFlushEventListeners();
+ for (FlushEventListener listener : flushEventListeners) {
+ if ( listener.getClass().equals( FullTextIndexEventListener.class ) ) {
+ return (FullTextIndexEventListener) listener;
+ }
+ }
+ log.debug( "FullTextIndexEventListener was not registered as FlushEventListener" );
+ return null;
+ }
+
+ //The code does not really fit the method name
+ //(unless you consider a flush a mini-transaction):
+ //we want to behave as "inTransaction" if the flushListener is registered.
+ public boolean isTransactionInProgress() {
+ // either it is a real transaction, or if we are capable to manage this in the IndexWorkFlushEventListener
+ return getIndexWorkFlushEventListener() != null || isRealTransactionInProgress();
+ }
+
+ private boolean isRealTransactionInProgress() {
+ if ( realTxInProgress == null ) {
+ realTxInProgress = eventSource.isTransactionInProgress();
+ }
+ return realTxInProgress;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/EventSourceTransactionContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
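
A short sketch of the TransactionContext contract implemented above, assuming an open EventSource (the session) and a Synchronization instance are available; eventSource and synchronization are placeholders:

    // Fragment, not a complete program.
    TransactionContext context = new EventSourceTransactionContext( eventSource );
    if ( context.isTransactionInProgress() ) {
        // either the real Transaction or the registered FullTextIndexEventListener drives completion
        context.registerSynchronization( synchronization );
    }
    Object txId = context.getTransactionIdentifier(); // the Transaction, or the session itself
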
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,95 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl;
+
+import javax.transaction.Status;
+import javax.transaction.Synchronization;
+
+import org.hibernate.search.backend.QueueingProcessor;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.util.WeakIdentityHashMap;
+
+/**
+ * Execute some work inside a transaction synchronization
+ *
+ * @author Emmanuel Bernard
+ */
+public class PostTransactionWorkQueueSynchronization implements Synchronization {
+
+ /**
+ * FullTextIndexEventListener uses a WeakIdentityHashMap&lt;Session,Synchronization&gt;,
+ * so make sure no Synchronization implementation keeps any
+ * (direct or indirect) reference to the Session.
+ */
+
+ private final QueueingProcessor queueingProcessor;
+ private boolean consumed;
+ private final WeakIdentityHashMap queuePerTransaction;
+ private WorkQueue queue = new WorkQueue();
+
+ /**
+ * Constructor for in-transaction work.
+ */
+ public PostTransactionWorkQueueSynchronization(QueueingProcessor queueingProcessor, WeakIdentityHashMap queuePerTransaction) {
+ this.queueingProcessor = queueingProcessor;
+ this.queuePerTransaction = queuePerTransaction;
+ }
+
+ public void add(Work work) {
+ queueingProcessor.add( work, queue );
+ }
+
+ public boolean isConsumed() {
+ return consumed;
+ }
+
+ public void beforeCompletion() {
+ queueingProcessor.prepareWorks(queue);
+ }
+
+ public void afterCompletion(int i) {
+ try {
+ if ( Status.STATUS_COMMITTED == i ) {
+ queueingProcessor.performWorks(queue);
+ }
+ else {
+ queueingProcessor.cancelWorks(queue);
+ }
+ }
+ finally {
+ consumed = true;
+ //clean the Synchronization per Transaction
+ //not needed stricto sensu but a cleaner approach and faster than the GC
+ if (queuePerTransaction != null) queuePerTransaction.removeValue( this );
+ }
+ }
+
+ public void flushWorks() {
+ WorkQueue subQueue = queue.splitQueue();
+ queueingProcessor.prepareWorks( subQueue );
+ queueingProcessor.performWorks( subQueue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/PostTransactionWorkQueueSynchronization.java
___________________________________________________________________
Name: svn:keywords
+ Id
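
To illustrate the lifecycle described above, the fragment below mirrors what the transaction layer does around a commit; queueingProcessor, queuePerTransaction and work are assumed to exist:

    // Fragment: expected call order only.
    PostTransactionWorkQueueSynchronization sync =
            new PostTransactionWorkQueueSynchronization( queueingProcessor, queuePerTransaction );
    sync.add( work );                                // collect work while the transaction is active
    sync.beforeCompletion();                         // prepareWorks: build and seal the Lucene work queue
    sync.afterCompletion( Status.STATUS_COMMITTED ); // performWorks; the instance is then consumed
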
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/TransactionalWorker.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/TransactionalWorker.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/TransactionalWorker.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,106 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl;
+
+import java.util.Properties;
+
+import javax.transaction.Synchronization;
+
+import org.hibernate.search.backend.QueueingProcessor;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkQueue;
+import org.hibernate.search.backend.Worker;
+import org.hibernate.search.backend.TransactionContext;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.WeakIdentityHashMap;
+import org.slf4j.Logger;
+
+/**
+ * Queues works per transaction.
+ * If out of transaction, the work is executed right away.
+ * <p/>
+ * When the worker execution property (<code>Environment.WORKER_EXECUTION</code>) is set to <code>async</code>
+ * the work is done in a separate thread (threads are pooled).
+ *
+ * @author Emmanuel Bernard
+ */
+public class TransactionalWorker implements Worker {
+
+ //note: there is one Worker instance per SearchFactory, reused concurrently for all sessions.
+
+ private static final Logger log = LoggerFactory.make();
+
+ //this is used from different threads, but doesn't need a
+ //synchronized map since for a given transaction there is no concurrent access
+ protected final WeakIdentityHashMap<Object, Synchronization> synchronizationPerTransaction = new WeakIdentityHashMap<Object, Synchronization>();
+ private QueueingProcessor queueingProcessor;
+
+ public void performWork(Work work, TransactionContext transactionContext) {
+ if ( transactionContext.isTransactionInProgress() ) {
+ Object transactionIdentifier = transactionContext.getTransactionIdentifier();
+ PostTransactionWorkQueueSynchronization txSync = ( PostTransactionWorkQueueSynchronization )
+ synchronizationPerTransaction.get( transactionIdentifier );
+ if ( txSync == null || txSync.isConsumed() ) {
+ txSync = new PostTransactionWorkQueueSynchronization(
+ queueingProcessor, synchronizationPerTransaction
+ );
+ transactionContext.registerSynchronization( txSync );
+ synchronizationPerTransaction.put( transactionIdentifier, txSync );
+ }
+ txSync.add( work );
+ }
+ else {
+ // this is a workaround: isTransactionInProgress should return "true"
+ // for correct configurations.
+ log.warn( "It appears changes are being pushed to the index out of a transaction. " +
+ "Register the IndexWorkFlushEventListener listener on flush to correctly manage Collections!" );
+ WorkQueue queue = new WorkQueue( 2 ); //one work can be split
+ queueingProcessor.add( work, queue );
+ queueingProcessor.prepareWorks( queue );
+ queueingProcessor.performWorks( queue );
+ }
+ }
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
+ this.queueingProcessor = new BatchedQueueingProcessor( searchFactory, props );
+ }
+
+ public void close() {
+ queueingProcessor.close();
+ }
+
+ public void flushWorks(TransactionContext transactionContext) {
+ if ( transactionContext.isTransactionInProgress() ) {
+ Object transaction = transactionContext.getTransactionIdentifier();
+ PostTransactionWorkQueueSynchronization txSync = ( PostTransactionWorkQueueSynchronization )
+ synchronizationPerTransaction.get( transaction );
+ if ( txSync != null && !txSync.isConsumed() ) {
+ txSync.flushWorks();
+ }
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/TransactionalWorker.java
___________________________________________________________________
Name: svn:keywords
+ Id
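
A sketch of how the engine drives a Worker such as the one above; properties, searchFactoryImplementor, work and transactionContext are assumed to be available from the caller:

    // Fragment: illustrative call sequence only.
    Worker worker = new TransactionalWorker();
    worker.initialize( properties, searchFactoryImplementor );
    worker.performWork( work, transactionContext ); // queued per transaction, or executed immediately
    worker.flushWorks( transactionContext );        // force any pending queue through the backend now
    worker.close();
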
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/BatchBackend.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/BatchBackend.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/BatchBackend.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,80 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.batchlucene;
+
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Implementors of this interface are not drop-in replacements for the standard BackendQueueProcessorFactory
+ * but are meant to be used only during batch processing.
+ * The order of LuceneWork(s) processed is not guaranteed as the queue is consumed by several
+ * concurrent workers.
+ *
+ * @author Sanne Grinovero
+ */
+public interface BatchBackend {
+
+ /**
+ * Used at startup; called once, before any other method.
+ * @param props all configuration properties
+ * @param monitor the progress monitor to notify about indexing progress
+ * @param searchFactory the client SearchFactory
+ */
+ void initialize(Properties props, MassIndexerProgressMonitor monitor, SearchFactoryImplementor searchFactory);
+
+ /**
+ * Enqueues one work to be processed asynchronously.
+ * @param work the work to enqueue
+ * @throws InterruptedException if the current thread is interrupted while
+ * waiting for the work queue to have enough space.
+ */
+ void enqueueAsyncWork(LuceneWork work) throws InterruptedException;
+
+ /**
+ * Performs one work synchronously, on the calling thread.
+ * @param work the work to apply
+ */
+ void doWorkInSync(LuceneWork work);
+
+ /**
+ * Waits until all work is done and terminates the executors.
+ * IndexWriter is not closed yet: work in sync can still be processed.
+ * @throws InterruptedException if the current thread is interrupted
+ * while waiting for the enqueued tasks to be finished.
+ */
+ void stopAndFlush(long timeout, TimeUnit unit) throws InterruptedException;
+
+ /**
+ * Used to shut down and release resources.
+ * No other method should be used after this one.
+ */
+ void close();
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/BatchBackend.java
___________________________________________________________________
Name: svn:keywords
+ Id
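
The call sequence expected from an implementation of this interface, based on the javadoc above; backend, props, monitor, searchFactory and the LuceneWork instances are assumptions of this sketch:

    // Fragment: typical BatchBackend lifecycle.
    backend.initialize( props, monitor, searchFactory );
    backend.enqueueAsyncWork( addWork );               // bulk work, consumed by worker threads
    backend.stopAndFlush( 60 * 60, TimeUnit.SECONDS ); // wait for the queue, then commit
    backend.doWorkInSync( optimizeWork );              // e.g. a final optimize on the caller thread
    backend.close();                                   // releases the IndexWriter
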
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/DirectoryProviderWorkspace.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/DirectoryProviderWorkspace.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/DirectoryProviderWorkspace.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,148 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.batchlucene;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.impl.lucene.works.LuceneWorkDelegate;
+import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;
+import org.hibernate.search.batchindexing.Executors;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Collects all resources needed to apply changes to one index.
+ * They are reused across the processing of all LuceneWork.
+ *
+ * !! Be careful to ensure the IndexWriter is eventually closed,
+ * or the index will stay locked.
+ * @see #close()
+ *
+ * @author Sanne Grinovero
+ */
+class DirectoryProviderWorkspace {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final ExecutorService executor;
+ private final LuceneWorkVisitor visitor;
+ private final Workspace workspace;
+ private final MassIndexerProgressMonitor monitor;
+
+ private final AtomicBoolean closed = new AtomicBoolean( false );
+
+ DirectoryProviderWorkspace(SearchFactoryImplementor searchFactoryImp, DirectoryProvider<?> dp, MassIndexerProgressMonitor monitor, int maxThreads) {
+ if ( maxThreads < 1 ) {
+ throw new IllegalArgumentException( "maxThreads needs to be at least 1" );
+ }
+ this.monitor = monitor;
+ workspace = new Workspace( searchFactoryImp, dp );
+ visitor = new LuceneWorkVisitor( workspace );
+ executor = Executors.newFixedThreadPool( maxThreads, "indexwriter" );
+ }
+
+ /**
+ * Notifies the index-writing threads that they should quit at the end of the enqueued
+ * tasks. Waits for the end of the current queue, then commits changes.
+ * @throws InterruptedException
+ */
+ public void stopAndFlush(long timeout, TimeUnit unit) throws InterruptedException {
+ checkIsNotClosed();
+ executor.shutdown(); //it becomes illegal to add more work
+ executor.awaitTermination( timeout, unit );
+ workspace.commitIndexWriter(); //commits changes if any
+ //does not yet close the IndexWriter !
+ }
+
+ /**
+ * Used to do some tasks at the beginning and/or at the end of the main batch
+ * operations. This work is not done asynchronously.
+ * @param work the work to apply
+ */
+ public void doWorkInSync(LuceneWork work) {
+ checkIsNotClosed();
+ LuceneWorkDelegate delegate = work.getWorkDelegate( visitor );
+ delegate.performWork( work, workspace.getIndexWriter( true ) );
+ delegate.logWorkDone( work , monitor );
+ //if the IndexWriter was opened, it's not closed now.
+ }
+
+ public void enqueueAsyncWork(LuceneWork work) {
+ //no need to check if we are closed here, better check inside the async method
+ executor.execute( new AsyncIndexRunnable( work ) );
+ }
+
+ /**
+ * Makes sure the executor is closed and closes the IndexWriter.
+ */
+ public void close() {
+ if ( closed.compareAndSet( false, true ) ) {
+ try {
+ if ( ! executor.isShutdown() ) {
+ log.error( "Terminating batch work! Index might end up in inconsistent state." );
+ executor.shutdownNow();
+ }
+ }
+ finally {
+ workspace.closeIndexWriter();
+ }
+ }
+ else {
+ checkIsNotClosed(); //will throw an appropriate exception
+ }
+ }
+
+ /**
+ * Verifies this is not closed yet, or throws an exception.
+ */
+ private void checkIsNotClosed() {
+ if ( closed.get() ) {
+ throw new SearchException( "Batch DirectoryProviderWorkspace is closed already" );
+ }
+ }
+
+ private class AsyncIndexRunnable implements Runnable {
+
+ private final LuceneWork work;
+
+ AsyncIndexRunnable(LuceneWork work) {
+ this.work = work;
+ }
+
+ public void run() {
+ doWorkInSync( work );
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/DirectoryProviderWorkspace.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
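
The ordering constraint spelled out above (stop and flush, then always close) can be summarized as follows; the class is package-private, so this fragment assumes code living in the same package with workspace already constructed:

    // Fragment: always reach close(), even if stopAndFlush() times out or fails.
    try {
        workspace.enqueueAsyncWork( work );
        workspace.stopAndFlush( 10, TimeUnit.MINUTES ); // waits for the queue, then commits
    }
    finally {
        workspace.close(); // closes the IndexWriter so the index does not stay locked
    }
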
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/LuceneBatchBackend.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/LuceneBatchBackend.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/LuceneBatchBackend.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,144 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.batchlucene;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.search.backend.impl.lucene.DpSelectionVisitor;
+import org.hibernate.search.backend.impl.lucene.PerDirectoryWorkProcessor;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * First EXPERIMENTAL BatchBackend; this is not meant to be used as a regular
+ * backend, only to apply batch changes to the index. Several threads
+ * are used to make changes to each index, so order of Work processing is not guaranteed.
+ *
+ * @author Sanne Grinovero
+ */
+public class LuceneBatchBackend implements BatchBackend {
+
+ public static final String CONCURRENT_WRITERS = Environment.BATCH_BACKEND + ".concurrent_writers";
+
+ private static final DpSelectionVisitor providerSelectionVisitor = new DpSelectionVisitor();
+
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private final Map<DirectoryProvider<?>,DirectoryProviderWorkspace> resourcesMap = new HashMap<DirectoryProvider<?>,DirectoryProviderWorkspace>();
+ private final PerDirectoryWorkProcessor asyncWorker = new AsyncBatchPerDirectoryWorkProcessor();
+ private final PerDirectoryWorkProcessor syncWorker = new SyncBatchPerDirectoryWorkProcessor();
+
+ public void initialize(Properties cfg, MassIndexerProgressMonitor monitor, SearchFactoryImplementor searchFactoryImplementor) {
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ int maxThreadsPerIndex = ConfigurationParseHelper.getIntValue( cfg, "concurrent_writers", 2 );
+ if ( maxThreadsPerIndex < 1 ) {
+ throw new SearchException( "concurrent_writers for batch backend must be at least 1." );
+ }
+ for ( DirectoryProvider<?> dp : searchFactoryImplementor.getDirectoryProviders() ) {
+ DirectoryProviderWorkspace resources = new DirectoryProviderWorkspace( searchFactoryImplementor, dp, monitor, maxThreadsPerIndex );
+ resourcesMap.put( dp, resources );
+ }
+ }
+
+ public void enqueueAsyncWork(LuceneWork work) throws InterruptedException {
+ sendWorkToShards( work, asyncWorker );
+ }
+
+ public void doWorkInSync(LuceneWork work) {
+ try {
+ sendWorkToShards( work, syncWorker );
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ //should not happen: SyncBatchPerDirectoryWorkProcessor below does not declare the throws clause.
+ throw new SearchException( "AssertionFailure" );
+ }
+ }
+
+ /**
+ * Stops the background threads and flushes changes.
+ * Please note the timeout is applied to each index in
+ * sequence, so the total wait can be as much as timeout * number of directory providers.
+ */
+ public void stopAndFlush(long timeout, TimeUnit unit) throws InterruptedException {
+ for ( DirectoryProviderWorkspace res : resourcesMap.values() ) {
+ res.stopAndFlush( timeout, unit );
+ }
+ }
+
+ public void close() {
+ Throwable error = null;
+ for ( DirectoryProviderWorkspace res : resourcesMap.values() ) {
+ try {
+ res.close();
+ }
+ catch (Throwable t) {
+ //make sure to try closing all IndexWriters
+ error = t;
+ }
+ }
+ if ( error != null ) {
+ throw new SearchException( "Error while closing massindexer", error );
+ }
+ }
+
+ private void sendWorkToShards(LuceneWork work, PerDirectoryWorkProcessor worker) throws InterruptedException {
+ final Class<?> entityType = work.getEntityClass();
+ DocumentBuilderIndexedEntity<?> documentBuilder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityType );
+ IndexShardingStrategy shardingStrategy = documentBuilder.getDirectoryProviderSelectionStrategy();
+ work.getWorkDelegate( providerSelectionVisitor ).addAsPayLoadsToQueue( work, shardingStrategy, worker );
+ }
+
+ /**
+ * Implements a PerDirectoryWorkProcessor to enqueue work Asynchronously.
+ */
+ private class AsyncBatchPerDirectoryWorkProcessor implements PerDirectoryWorkProcessor {
+
+ public void addWorkToDpProcessor(DirectoryProvider<?> dp, LuceneWork work) throws InterruptedException {
+ resourcesMap.get( dp ).enqueueAsyncWork( work );
+ }
+
+ }
+
+ /**
+ * Implements a PerDirectoryWorkProcessor to enqueue work Synchronously.
+ */
+ private class SyncBatchPerDirectoryWorkProcessor implements PerDirectoryWorkProcessor {
+
+ public void addWorkToDpProcessor(DirectoryProvider<?> dp, LuceneWork work) {
+ resourcesMap.get( dp ).doWorkInSync( work );
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/batchlucene/LuceneBatchBackend.java
___________________________________________________________________
Name: svn:keywords
+ Id
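
A configuration sketch for the concurrent writers setting; note that initialize() above reads the short key "concurrent_writers", so the Properties handed to it are assumed to be already scoped to the batch backend:

    // Fragment: cfg is the Properties instance later passed to initialize().
    Properties cfg = new Properties();
    cfg.setProperty( "concurrent_writers", "4" ); // must be at least 1; 2 is the default above
    BatchBackend backend = new LuceneBatchBackend();
    backend.initialize( cfg, monitor, searchFactoryImplementor );
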
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/BlackHoleBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/BlackHoleBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/BlackHoleBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,74 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.blackhole;
+
+import java.util.List;
+import java.util.Properties;
+
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * This backend does not do anything: the Documents are not
+ * sent to any index but are discarded.
+ * Useful to identify the bottleneck in indexing performance problems,
+ * fully disabling the backend system but still building the Documents
+ * needed to update an index (loading data from DB).
+ *
+ * @author Sanne Grinovero
+ */
+public class BlackHoleBackendQueueProcessorFactory implements BackendQueueProcessorFactory {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final NoOp noOp = new NoOp();
+
+ public Runnable getProcessor(List<LuceneWork> queue) {
+ return noOp;
+ }
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
+ // no-op
+ log.warn( "initialized \"blackhole\" backend. Index changes will be prepared but discarded!" );
+ }
+
+ public void close() {
+ // no-op
+ log.info( "closed \"blackhole\" backend." );
+ }
+
+ private static class NoOp implements Runnable {
+
+ public void run() {
+ // no-op
+ log.debug( "Discarding a list of LuceneWork" );
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/blackhole/BlackHoleBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
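
To route all indexing work to this backend, the worker backend is typically pointed at a "blackhole" shortcut; the property key and shortcut value shown here are assumptions, since the shortcut resolution is not part of this diff:

    // Assumed key and value; verify against the Environment constants of your version.
    Properties properties = new Properties();
    properties.setProperty( "hibernate.search.worker.backend", "blackhole" );
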
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,101 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jgroups;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.jgroups.ChannelClosedException;
+import org.jgroups.ChannelNotConnectedException;
+import org.jgroups.Message;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Responsible for sending Lucene works from slave nodes to the master node
+ *
+ * @author Lukasz Moren
+ */
+public class JGroupsBackendQueueProcessor implements Runnable {
+
+ protected static final Logger log = LoggerFactory.make();
+
+ private final JGroupsBackendQueueProcessorFactory factory;
+ private final List<LuceneWork> queue;
+
+ public JGroupsBackendQueueProcessor(List<LuceneWork> queue, JGroupsBackendQueueProcessorFactory factory) {
+ this.factory = factory;
+ this.queue = queue;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void run() {
+ List<LuceneWork> filteredQueue = new ArrayList<LuceneWork>( queue );
+ log.trace( "Preparing {} Lucene works to be sent to master node.", filteredQueue.size() );
+
+ for ( LuceneWork work : queue ) {
+ if ( work instanceof OptimizeLuceneWork ) {
+ //we don't want optimization to be propagated
+ filteredQueue.remove( work );
+ }
+ }
+ log.trace(
+ "Filtering: optimized Lucene works are not going to be sent to master node. There is {} Lucene works after filtering.",
+ filteredQueue.size()
+ );
+ if ( filteredQueue.size() == 0 ) {
+ log.trace(
+ "Nothing to send. Propagating works to a cluster has been skipped."
+ );
+ return;
+ }
+
+ /* Creates and sends a message with the Lucene works to the master.
+ * As long as the message destination address is null, the Lucene works will be received by all listeners that implement
+ * the org.jgroups.MessageListener interface; multiple master nodes in the cluster are allowed. */
+ try {
+ Message message = new Message( null, factory.getAddress(), ( Serializable ) filteredQueue );
+ factory.getChannel().send( message );
+ log.trace(
+ "Lucene works have been sent from slave {} to master node.", factory.getAddress()
+ );
+ }
+ catch ( ChannelNotConnectedException e ) {
+ throw new SearchException(
+ "Unable to send Lucene work. Channel is not connected to: "
+ + factory.getClusterName()
+ );
+ }
+ catch ( ChannelClosedException e ) {
+ throw new SearchException( "Unable to send Lucene work. Attempt to send message on closed JGroups channel" );
+ }
+
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,205 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jgroups;
+
+import java.net.URL;
+import java.util.List;
+import java.util.Properties;
+
+import org.jgroups.Address;
+import org.jgroups.Channel;
+import org.jgroups.ChannelException;
+import org.jgroups.JChannel;
+import org.slf4j.Logger;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.XMLHelper;
+import org.hibernate.util.ConfigHelper;
+
+
+/**
+ * Common base class for Master and Slave BackendQueueProcessorFactories
+ *
+ * @author Lukasz Moren
+ */
+public abstract class JGroupsBackendQueueProcessorFactory implements BackendQueueProcessorFactory {
+
+ protected static final Logger log = LoggerFactory.make();
+
+ public static final String JGROUPS_PREFIX = Environment.WORKER_BACKEND + ".jgroups.";
+
+ public static final String CONFIGURATION_STRING = JGROUPS_PREFIX + "configurationString";
+ public static final String CONFIGURATION_XML = JGROUPS_PREFIX + "configurationXml";
+ public static final String CONFIGURATION_FILE = JGROUPS_PREFIX + "configurationFile";
+ private static final String DEFAULT_JGROUPS_CONFIGURATION_FILE = "flush-udp.xml";
+
+ public static final String JG_CLUSTER_NAME = JGROUPS_PREFIX + "clusterName";
+
+ protected String clusterName = "HSearchCluster";
+ protected SearchFactoryImplementor searchFactory;
+ protected Channel channel = null;
+ protected Address address;
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
+ this.searchFactory = searchFactory;
+
+ if ( props.containsKey( JG_CLUSTER_NAME ) ) {
+ setClusterName( props.getProperty( JG_CLUSTER_NAME ) );
+ }
+
+ prepareJGroupsChannel( props );
+ }
+
+ private void prepareJGroupsChannel(Properties props) {
+ log.info( "Starting JGroups Channel" );
+ try {
+ buildChannel( props );
+ channel.setOpt( Channel.AUTO_RECONNECT, Boolean.TRUE );
+ channel.connect( clusterName );
+ }
+ catch ( ChannelException e ) {
+ throw new SearchException( "Unable to connect to: [" + clusterName + "] JGroups channel" );
+ }
+ log.info( "Connected to cluster [ {} ]. The node address is {}", clusterName, getAddress() );
+
+ if ( !channel.flushSupported() ) {
+ log.warn(
+ "FLUSH is not present in your JGroups stack! FLUSH is needed to ensure messages are not dropped while new nodes join the cluster. Will proceed, but inconsistencies may arise!"
+ );
+ }
+ }
+
+ /**
+ * Reads the configuration and builds the channel from it.
+ * In order of preference we first look for an external JGroups file, then a set of XML properties, and
+ * finally the legacy JGroups String properties.
+ *
+ * @param props the configuration properties
+ */
+ private void buildChannel(Properties props) {
+ String cfg;
+ if ( props != null ) {
+ if ( props.containsKey( CONFIGURATION_FILE ) ) {
+ cfg = props.getProperty( CONFIGURATION_FILE );
+ try {
+ channel = new JChannel( ConfigHelper.locateConfig( cfg ) );
+ }
+ catch ( Exception e ) {
+ log.error( "Error while trying to create a channel using config files: {}", cfg );
+ throw new SearchException( e );
+ }
+ }
+
+ if ( props.containsKey( CONFIGURATION_XML ) ) {
+ cfg = props.getProperty( CONFIGURATION_XML );
+ try {
+ channel = new JChannel( XMLHelper.elementFromString( cfg ) );
+ }
+ catch ( Exception e ) {
+ log.error( "Error while trying to create a channel using config XML: {}", cfg );
+ throw new SearchException( e );
+ }
+ }
+
+ if ( props.containsKey( CONFIGURATION_STRING ) ) {
+ cfg = props.getProperty( CONFIGURATION_STRING );
+ try {
+ channel = new JChannel( cfg );
+ }
+ catch ( Exception e ) {
+ log.error( "Error while trying to create a channel using config string: {}", cfg );
+ throw new SearchException( e );
+ }
+ }
+ }
+
+ if ( channel == null ) {
+ log.info(
+ "Unable to use any JGroups configuration mechanisms provided in properties {}. Using default JGroups configuration file!",
+ props
+ );
+ try {
+ URL fileUrl = ConfigHelper.locateConfig( DEFAULT_JGROUPS_CONFIGURATION_FILE );
+ if ( fileUrl != null ) {
+ channel = new JChannel( fileUrl );
+ }
+ else {
+ log.warn(
+ "Default JGroups configuration file was not found. Attempt to start JGroups channel with default configuration!"
+ );
+ channel = new JChannel();
+ }
+ }
+ catch ( ChannelException e ) {
+ throw new SearchException( "Unable to start JGroups channel", e );
+ }
+ }
+ }
+
+ public abstract Runnable getProcessor(List<LuceneWork> queue);
+
+ public void close() {
+ try {
+ if ( channel != null && channel.isOpen() ) {
+ log.info( "Disconnecting and closing JGroups Channel" );
+ channel.disconnect();
+ channel.close();
+ }
+ }
+ catch ( Exception toLog ) {
+ log.error( "Problem closing channel; setting it to null", toLog );
+ channel = null;
+ }
+ }
+
+ public Channel getChannel() {
+ return channel;
+ }
+
+ public void setClusterName(String clusterName) {
+ this.clusterName = clusterName;
+ }
+
+ public String getClusterName() {
+ return clusterName;
+ }
+
+ /**
+ * Cluster's node address
+ *
+ * @return Address
+ */
+ public Address getAddress() {
+ if ( address == null && channel != null ) {
+ address = channel.getLocalAddress();
+ }
+ return address;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
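
A configuration sketch for the options defined above, using the public constants rather than literal keys; normally only one of the three configuration* options is set, and props is later handed to initialize():

    Properties props = new Properties();
    props.setProperty( JGroupsBackendQueueProcessorFactory.JG_CLUSTER_NAME, "HSearchCluster" );
    props.setProperty( JGroupsBackendQueueProcessorFactory.CONFIGURATION_FILE, "flush-udp.xml" );
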
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsMasterMessageListener.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsMasterMessageListener.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsMasterMessageListener.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,111 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jgroups;
+
+import java.util.List;
+
+import org.jgroups.Address;
+import org.jgroups.Message;
+import org.jgroups.Receiver;
+import org.jgroups.View;
+import org.slf4j.Logger;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Listens for messages from slave nodes and applies them using the <code>LuceneBackendQueueProcessor</code>
+ *
+ * @author Lukasz Moren
+ * @see org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory
+ * @see org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessor
+ * @see org.jgroups.Receiver
+ */
+public class JGroupsMasterMessageListener implements Receiver {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private SearchFactoryImplementor searchFactory;
+
+ public JGroupsMasterMessageListener(SearchFactoryImplementor searchFactory) {
+ this.searchFactory = searchFactory;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void receive(Message message) {
+ List<LuceneWork> queue;
+ try {
+ queue = ( List<LuceneWork> ) message.getObject();
+ }
+ catch ( ClassCastException e ) {
+ log.error( "Illegal object retrieved from message.", e );
+ return;
+ }
+
+ if ( queue != null && !queue.isEmpty() ) {
+ log.debug(
+ "There are {} Lucene docs received from slave node {} to be processed by master",
+ queue.size(),
+ message.getSrc()
+ );
+ Runnable worker = getWorker( queue );
+ worker.run();
+ }
+ else {
+ log.warn( "Received null or empty Lucene works list in message." );
+ }
+ }
+
+ private Runnable getWorker(List<LuceneWork> queue) {
+ Runnable processor;
+ processor = searchFactory.getBackendQueueProcessorFactory().getProcessor( queue );
+ return processor;
+ }
+
+ // ------------------------------------------------------------------------------------------------------------------
+ // Implementations of JGroups interfaces
+ // ------------------------------------------------------------------------------------------------------------------
+
+ public byte[] getState() {
+ return null;
+ }
+
+ public void setState(byte[] state) {
+ //no-op
+ }
+
+ public void viewAccepted(View view) {
+ log.info( "Received new cluster view: {}", view );
+ }
+
+ public void suspect(Address suspected_mbr) {
+ //no-op
+ }
+
+ public void block() {
+ //no-op
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/JGroupsMasterMessageListener.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/MasterJGroupsBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/MasterJGroupsBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/MasterJGroupsBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,82 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jgroups;
+
+import java.util.List;
+import java.util.Properties;
+
+import org.jgroups.Receiver;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Backend factory used in JGroups clustering mode on the master node.
+ * Wraps {@link LuceneBackendQueueProcessorFactory}, providing extra
+ * functionality to receive Lucene works from slave nodes.
+ *
+ * @author Lukasz Moren
+ * @see org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory
+ * @see org.hibernate.search.backend.impl.jgroups.SlaveJGroupsBackendQueueProcessorFactory
+ */
+public class MasterJGroupsBackendQueueProcessorFactory extends JGroupsBackendQueueProcessorFactory {
+
+ private LuceneBackendQueueProcessorFactory luceneBackendQueueProcessorFactory;
+ private Receiver masterListener;
+
+ @Override
+ public void initialize(Properties props, SearchFactoryImplementor searchFactory) {
+ super.initialize( props, searchFactory );
+ initLuceneBackendQueueProcessorFactory( props, searchFactory );
+
+ registerMasterListener();
+ }
+
+ public Runnable getProcessor(List<LuceneWork> queue) {
+ return luceneBackendQueueProcessorFactory.getProcessor( queue );
+ }
+
+ private void registerMasterListener() {
+ //register JGroups receiver in master node to get Lucene docs from slave nodes
+ masterListener = new JGroupsMasterMessageListener( searchFactory );
+ channel.setReceiver( masterListener );
+ }
+
+ private void initLuceneBackendQueueProcessorFactory(Properties props, SearchFactoryImplementor searchFactory) {
+ luceneBackendQueueProcessorFactory = new LuceneBackendQueueProcessorFactory();
+ luceneBackendQueueProcessorFactory.initialize( props, searchFactory );
+ }
+
+ public Receiver getMasterListener() {
+ return masterListener;
+ }
+
+ @Override
+ public void close() {
+ super.close();
+ luceneBackendQueueProcessorFactory.close();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/MasterJGroupsBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/SlaveJGroupsBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/SlaveJGroupsBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/SlaveJGroupsBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jgroups;
+
+import java.util.List;
+
+import org.hibernate.search.backend.LuceneWork;
+
+/**
+ * @author Lukasz Moren
+ */
+public class SlaveJGroupsBackendQueueProcessorFactory extends JGroupsBackendQueueProcessorFactory {
+
+ public Runnable getProcessor(List<LuceneWork> queue) {
+ return new JGroupsBackendQueueProcessor( queue, this );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jgroups/SlaveJGroupsBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
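
How a deployment selects between the master and slave factories above is not shown in this diff; assuming the usual worker backend shortcuts exist for them, the configuration would look roughly like this:

    // Assumed shortcut names ("jgroupsMaster" / "jgroupsSlave") and property key.
    Properties masterProps = new Properties();
    masterProps.setProperty( "hibernate.search.worker.backend", "jgroupsMaster" );

    Properties slaveProps = new Properties();
    slaveProps.setProperty( "hibernate.search.worker.backend", "jgroupsSlave" );
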
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/AbstractJMSHibernateSearchController.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/AbstractJMSHibernateSearchController.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/AbstractJMSHibernateSearchController.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,135 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jms;
+
+import java.util.List;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import javax.jms.JMSException;
+import javax.jms.Message;
+import javax.jms.MessageListener;
+import javax.jms.ObjectMessage;
+
+import org.slf4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.ContextHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Implements the Hibernate Search controller responsible for processing the
+ * work sent through JMS by the slave nodes.
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class AbstractJMSHibernateSearchController implements MessageListener {
+ private static final Logger log = LoggerFactory.make();
+
+ /**
+ * Returns the current session or opens a new one.
+ * This session is not used per se, but is the link to access the Search configuration.
+ * <p>
+ * A typical EJB 3.0 use case would be to get the session from the container (injected),
+ * e.g. in JBoss EJB 3.0:
+ * <p>
+ * <code>
+ * @PersistenceContext private Session session;<br>
+ * <br>
+ * protected Session getSession() {<br>
+ * return session;<br>
+ * }<br>
+ * </code>
+ * <p>
+ * or e.g. in any container:<br>
+ * <code>
+ * @PersistenceContext private EntityManager entityManager;<br>
+ * <br>
+ * protected Session getSession() {<br>
+ * return (Session) entityManager.getDelegate();<br>
+ * }<br>
+ * </code>
+ */
+ protected abstract Session getSession();
+
+ /**
+ * Ensures the resources are cleaned up after use.
+ * If the session has been directly or indirectly injected, this method is a no-op.
+ */
+ protected abstract void cleanSessionIfNeeded(Session session);
+
+ /**
+ * Process the Hibernate Search work queues received
+ */
+ public void onMessage(Message message) {
+ if ( !( message instanceof ObjectMessage ) ) {
+ log.error( "Incorrect message type: {}", message.getClass() );
+ return;
+ }
+ ObjectMessage objectMessage = (ObjectMessage) message;
+ List<LuceneWork> queue;
+ try {
+ queue = (List<LuceneWork>) objectMessage.getObject();
+ }
+ catch (JMSException e) {
+ log.error( "Unable to retrieve object from message: " + message.getClass(), e );
+ return;
+ }
+ catch (ClassCastException e) {
+ log.error( "Illegal object retrieved from message", e );
+ return;
+ }
+ Runnable worker = getWorker( queue );
+ worker.run();
+ }
+
+ private Runnable getWorker(List<LuceneWork> queue) {
+ //FIXME the cast is fragile because we do not control what getSession() returns
+ Session session = getSession();
+ Runnable processor = null;
+
+ try {
+ SearchFactoryImplementor factory = ContextHelper.getSearchFactory( session );
+ processor = factory.getBackendQueueProcessorFactory().getProcessor( queue );
+ }
+ finally {
+ cleanSessionIfNeeded(session);
+ }
+ return processor;
+ }
+
+ @PostConstruct
+ public void initialize() {
+ //init the source copy process
+ //TODO actually this is probably wrong since this is now part of the DP
+ }
+
+ @PreDestroy
+ public void shutdown() {
+ //stop the source copy process
+ //TODO actually this is probably wrong since this is now part of the DP
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/AbstractJMSHibernateSearchController.java
___________________________________________________________________
Name: svn:keywords
+ Id
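For reference, a concrete controller for the abstract class above is typically deployed as a message-driven bean along the lines of the sketch below; the bean name, queue name and activation properties are illustrative assumptions, not part of this revision:

import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

import org.hibernate.Session;
import org.hibernate.search.backend.impl.jms.AbstractJMSHibernateSearchController;

@MessageDriven(activationConfig = {
    @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue"),
    @ActivationConfigProperty(propertyName = "destination", propertyValue = "queue/hibernatesearch")
})
public class MDBSearchController extends AbstractJMSHibernateSearchController {

    @PersistenceContext
    private EntityManager em;

    // the session is only used as a handle to reach the Search configuration
    protected Session getSession() {
        return (Session) em.getDelegate();
    }

    // nothing to release: the container manages the injected persistence context
    protected void cleanSessionIfNeeded(Session session) {
    }
}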
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,97 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jms;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import javax.jms.JMSException;
+import javax.jms.ObjectMessage;
+import javax.jms.QueueConnection;
+import javax.jms.QueueSender;
+import javax.jms.QueueSession;
+
+import org.slf4j.Logger;
+
+import org.hibernate.HibernateException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class JMSBackendQueueProcessor implements Runnable {
+ private static final Logger log = LoggerFactory.make();
+
+ private List<LuceneWork> queue;
+ private JMSBackendQueueProcessorFactory factory;
+
+ public JMSBackendQueueProcessor(List<LuceneWork> queue,
+ JMSBackendQueueProcessorFactory jmsBackendQueueProcessorFactory) {
+ this.queue = queue;
+ this.factory = jmsBackendQueueProcessorFactory;
+ }
+
+ public void run() {
+ List<LuceneWork> filteredQueue = new ArrayList<LuceneWork>(queue);
+ for (LuceneWork work : queue) {
+ if ( work instanceof OptimizeLuceneWork ) {
+ //we don't want optimization to be propagated
+ filteredQueue.remove( work );
+ }
+ }
+ if ( filteredQueue.size() == 0) return;
+ factory.prepareJMSTools();
+ QueueConnection cnn = null;
+ QueueSender sender;
+ QueueSession session;
+ try {
+ cnn = factory.getJMSFactory().createQueueConnection();
+ //TODO make transacted parameterized
+ session = cnn.createQueueSession( false, QueueSession.AUTO_ACKNOWLEDGE );
+
+ ObjectMessage message = session.createObjectMessage();
+ message.setObject( (Serializable) filteredQueue );
+
+ sender = session.createSender( factory.getJmsQueue() );
+ sender.send( message );
+
+ session.close();
+ }
+ catch (JMSException e) {
+ throw new HibernateException( "Unable to send Search work to JMS queue: " + factory.getJmsQueueName(), e );
+ }
+ finally {
+ try {
+ if (cnn != null)
+ cnn.close();
+ }
+ catch ( JMSException e ) {
+ log.warn( "Unable to close JMS connection for " + factory.getJmsQueueName(), e );
+ }
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,146 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.jms;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Properties;
+import javax.jms.Queue;
+import javax.jms.QueueConnectionFactory;
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneWork;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class JMSBackendQueueProcessorFactory implements BackendQueueProcessorFactory {
+ private String jmsQueueName;
+ private String jmsConnectionFactoryName;
+ private static final String JNDI_PREFIX = Environment.WORKER_PREFIX + "jndi.";
+ private Properties properties;
+ private Queue jmsQueue;
+ private QueueConnectionFactory factory;
+ public static final String JMS_CONNECTION_FACTORY = Environment.WORKER_PREFIX + "jms.connection_factory";
+ public static final String JMS_QUEUE = Environment.WORKER_PREFIX + "jms.queue";
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ //TODO proper exception if JMS queue and connection factory are not there
+ this.properties = props;
+ this.jmsConnectionFactoryName = props.getProperty( JMS_CONNECTION_FACTORY );
+ this.jmsQueueName = props.getProperty( JMS_QUEUE );
+ prepareJMSTools();
+ }
+
+ public Runnable getProcessor(List<LuceneWork> queue) {
+ return new JMSBackendQueueProcessor( queue, this );
+ }
+
+
+ public QueueConnectionFactory getJMSFactory() {
+ return factory;
+ }
+
+ public Queue getJmsQueue() {
+ return jmsQueue;
+ }
+
+
+ public String getJmsQueueName() {
+ return jmsQueueName;
+ }
+
+ public void prepareJMSTools() {
+ if ( jmsQueue != null && factory != null ) return;
+ try {
+ InitialContext initialContext = getInitialContext( properties );
+ factory = (QueueConnectionFactory) initialContext.lookup( jmsConnectionFactoryName );
+ jmsQueue = (Queue) initialContext.lookup( jmsQueueName );
+
+ }
+ catch (NamingException e) {
+ throw new SearchException( "Unable to lookup Search queue ("
+ + ( jmsQueueName != null ?
+ jmsQueueName :
+ "null" ) + ") and connection factory ("
+ + ( jmsConnectionFactoryName != null ?
+ jmsConnectionFactoryName :
+ "null" ) + ")",
+ e
+ );
+ }
+ }
+
+ private InitialContext getInitialContext(Properties properties) throws NamingException {
+ Properties jndiProps = getJndiProperties( properties );
+ if ( jndiProps.size() == 0 ) {
+ return new InitialContext();
+ }
+ else {
+ return new InitialContext( jndiProps );
+ }
+ }
+
+ private static Properties getJndiProperties(Properties properties) {
+
+ HashSet specialProps = new HashSet();
+ specialProps.add( JNDI_PREFIX + "class" );
+ specialProps.add( JNDI_PREFIX + "url" );
+
+ Iterator iter = properties.keySet().iterator();
+ Properties result = new Properties();
+ while ( iter.hasNext() ) {
+ String prop = (String) iter.next();
+ if ( prop.indexOf( JNDI_PREFIX ) > -1 && !specialProps.contains( prop ) ) {
+ result.setProperty(
+ prop.substring( JNDI_PREFIX.length() ),
+ properties.getProperty( prop )
+ );
+ }
+ }
+
+ String jndiClass = properties.getProperty( JNDI_PREFIX + "class" );
+ String jndiURL = properties.getProperty( JNDI_PREFIX + "url" );
+ // we want to be able to just use the defaults,
+ // if JNDI environment properties are not supplied
+ // so don't put null in anywhere
+ if ( jndiClass != null ) result.put( Context.INITIAL_CONTEXT_FACTORY, jndiClass );
+ if ( jndiURL != null ) result.put( Context.PROVIDER_URL, jndiURL );
+
+ return result;
+ }
+
+ public void close() {
+ // no need to release anything
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/jms/JMSBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
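A rough configuration sketch for wiring this factory up follows; only the two public constants come from the class added above, while the "jms" backend shortcut, the JNDI names and the master host are assumptions used for illustration:

import java.util.Properties;

import org.hibernate.search.Environment;
import org.hibernate.search.backend.impl.jms.JMSBackendQueueProcessorFactory;

public class JmsBackendConfigSketch {

    public static Properties slaveNodeProperties() {
        Properties props = new Properties();
        // assumed shortcut resolving to JMSBackendQueueProcessorFactory
        props.setProperty( Environment.WORKER_BACKEND, "jms" );
        // connection factory and queue are resolved through JNDI by prepareJMSTools()
        props.setProperty( JMSBackendQueueProcessorFactory.JMS_CONNECTION_FACTORY, "java:/ConnectionFactory" );
        props.setProperty( JMSBackendQueueProcessorFactory.JMS_QUEUE, "queue/hibernatesearch" );
        // optional JNDI environment, picked up through the "jndi." sub-prefix
        props.setProperty( Environment.WORKER_PREFIX + "jndi.url", "jnp://masterhost:1099" );
        return props;
    }
}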
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/CloseIndexRunnable.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/CloseIndexRunnable.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/CloseIndexRunnable.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import org.slf4j.Logger;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Runnable to close the IndexWriter of a Workspace, if open.
+ * Used when a close operation needs to be scheduled after other Runnables in an executor queue.
+ *
+ * @author Sanne Grinovero
+ */
+public class CloseIndexRunnable implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+ private final Workspace workspace;
+
+ public CloseIndexRunnable(Workspace workspace) {
+ this.workspace = workspace;
+ }
+
+ public void run() {
+ log.debug( "Closing IndexWriter if needed" );
+ workspace.closeIndexWriter();
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/CloseIndexRunnable.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * @author Sanne Grinovero
+ */
+public interface DpSelectionDelegate {
+
+ /**
+ * The LuceneWork must be applied to different indexes.
+ * @param work the work to split.
+ * @param shardingStrategy the sharding strategy, usually needed to identify the affected Directories.
+ * @param queues the target queues to add the work to.
+ * @throws InterruptedException
+ */
+ void addAsPayLoadsToQueue(LuceneWork work,
+ IndexShardingStrategy shardingStrategy, PerDirectoryWorkProcessor queues) throws InterruptedException;
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionVisitor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionVisitor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionVisitor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,126 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
+import org.hibernate.search.backend.WorkVisitor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * This is the main client for IndexShardingStrategies.
+ * It is the only implementation of WorkVisitor<DpSelectionDelegate>,
+ * using a visitor/selector pattern for different implementations of addAsPayLoadsToQueue
+ * depending on the type of LuceneWork.
+ *
+ * @author Sanne Grinovero
+ */
+public class DpSelectionVisitor implements WorkVisitor<DpSelectionDelegate> {
+
+ private final AddSelectionDelegate addDelegate = new AddSelectionDelegate();
+ private final DeleteSelectionDelegate deleteDelegate = new DeleteSelectionDelegate();
+ private final OptimizeSelectionDelegate optimizeDelegate = new OptimizeSelectionDelegate();
+ private final PurgeAllSelectionDelegate purgeDelegate = new PurgeAllSelectionDelegate();
+
+ public DpSelectionDelegate getDelegate(AddLuceneWork addLuceneWork) {
+ return addDelegate;
+ }
+
+ public DpSelectionDelegate getDelegate(DeleteLuceneWork deleteLuceneWork) {
+ return deleteDelegate;
+ }
+
+ public DpSelectionDelegate getDelegate(OptimizeLuceneWork optimizeLuceneWork) {
+ return optimizeDelegate;
+ }
+
+ public DpSelectionDelegate getDelegate(PurgeAllLuceneWork purgeAllLuceneWork) {
+ return purgeDelegate;
+ }
+
+ private static class AddSelectionDelegate implements DpSelectionDelegate {
+
+ public void addAsPayLoadsToQueue(LuceneWork work,
+ IndexShardingStrategy shardingStrategy, PerDirectoryWorkProcessor queues) throws InterruptedException {
+ DirectoryProvider<?> provider = shardingStrategy.getDirectoryProviderForAddition(
+ work.getEntityClass(),
+ work.getId(),
+ work.getIdInString(),
+ work.getDocument()
+ );
+ queues.addWorkToDpProcessor( provider, work );
+ }
+
+ }
+
+ private static class DeleteSelectionDelegate implements DpSelectionDelegate {
+
+ public void addAsPayLoadsToQueue(LuceneWork work,
+ IndexShardingStrategy shardingStrategy, PerDirectoryWorkProcessor queues) throws InterruptedException {
+ DirectoryProvider<?>[] providers = shardingStrategy.getDirectoryProvidersForDeletion(
+ work.getEntityClass(),
+ work.getId(),
+ work.getIdInString()
+ );
+ for (DirectoryProvider<?> provider : providers) {
+ queues.addWorkToDpProcessor( provider, work );
+ }
+ }
+
+ }
+
+ private static class OptimizeSelectionDelegate implements DpSelectionDelegate {
+
+ public void addAsPayLoadsToQueue(LuceneWork work,
+ IndexShardingStrategy shardingStrategy, PerDirectoryWorkProcessor queues) throws InterruptedException {
+ DirectoryProvider<?>[] providers = shardingStrategy.getDirectoryProvidersForAllShards();
+ for (DirectoryProvider<?> provider : providers) {
+ queues.addWorkToDpProcessor( provider, work );
+ }
+ }
+
+ }
+
+ private static class PurgeAllSelectionDelegate implements DpSelectionDelegate {
+
+ public void addAsPayLoadsToQueue(LuceneWork work,
+ IndexShardingStrategy shardingStrategy, PerDirectoryWorkProcessor queues) throws InterruptedException {
+ DirectoryProvider<?>[] providers = shardingStrategy.getDirectoryProvidersForDeletion(
+ work.getEntityClass(),
+ work.getId(),
+ work.getIdInString()
+ );
+ for (DirectoryProvider<?> provider : providers) {
+ queues.addWorkToDpProcessor( provider, work );
+ }
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/DpSelectionVisitor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
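The delegates above only route work; deciding which Directory receives it is left to the IndexShardingStrategy. As a sketch of that contract (the interface shape is assumed unchanged from the released API; the round-robin logic and class name are purely illustrative):

import java.io.Serializable;
import java.util.Properties;

import org.apache.lucene.document.Document;
import org.hibernate.search.store.DirectoryProvider;
import org.hibernate.search.store.IndexShardingStrategy;

public class RoundRobinShardingStrategy implements IndexShardingStrategy {

    private DirectoryProvider<?>[] providers;
    private int next = 0;

    public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
        this.providers = providers;
    }

    public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
        return providers;
    }

    public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
        // illustrative only: additions rotate across shards
        next = ( next + 1 ) % providers.length;
        return providers[next];
    }

    public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
        // deletions cannot know which shard holds the document, so target all of them
        return providers;
    }
}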
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import java.util.List;
+import java.util.Map;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Apply the operations to Lucene directories.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ * @author Sanne Grinovero
+ */
+class LuceneBackendQueueProcessor implements Runnable {
+
+ private final List<LuceneWork> queue;
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final Map<DirectoryProvider,PerDPResources> resourcesMap;
+ private final boolean sync;
+
+ private static final DpSelectionVisitor providerSelectionVisitor = new DpSelectionVisitor();
+ private static final Logger log = LoggerFactory.make();
+
+ LuceneBackendQueueProcessor(List<LuceneWork> queue,
+ SearchFactoryImplementor searchFactoryImplementor,
+ Map<DirectoryProvider,PerDPResources> resourcesMap,
+ boolean syncMode) {
+ this.sync = syncMode;
+ this.queue = queue;
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.resourcesMap = resourcesMap;
+ }
+
+ public void run() {
+ QueueProcessors processors = new QueueProcessors( resourcesMap );
+ // divide the queue into tasks, adding to QueueProcessors by affected Directory.
+ try {
+ for ( LuceneWork work : queue ) {
+ final Class<?> entityType = work.getEntityClass();
+ DocumentBuilderIndexedEntity<?> documentBuilder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( entityType );
+ IndexShardingStrategy shardingStrategy = documentBuilder.getDirectoryProviderSelectionStrategy();
+ work.getWorkDelegate( providerSelectionVisitor ).addAsPayLoadsToQueue( work, shardingStrategy, processors );
+ }
+ //this Runnable splits tasks in more runnables and then runs them:
+ processors.runAll( sync );
+ } catch (InterruptedException e) {
+ log.error( "Index update task has been interrupted", e );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessorFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessorFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessorFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,86 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.List;
+
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.impl.BatchedQueueingProcessor;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * This factory contains the Workspace and LuceneWork visitor implementation,
+ * reused per DirectoryProvider.
+ * The lifecycle of both the Workspace(s) and the LuceneWorkVisitor(s) is linked to the backend
+ * lifecycle (reused and shared by all transactions).
+ * The LuceneWorkVisitor(s) are stateless, the Workspace(s) are threadsafe.
+ *
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class LuceneBackendQueueProcessorFactory implements BackendQueueProcessorFactory {
+
+ private SearchFactoryImplementor searchFactoryImp;
+
+ /**
+ * Contains the Workspace and LuceneWork visitor implementation,
+ * reused per-DirectoryProvider.
+ * The lifecycle of both the Workspace(s) and the LuceneWorkVisitor(s) is linked to the backend
+ * lifecycle (reused and shared by all transactions);
+ * the LuceneWorkVisitor(s) are stateless, the Workspace(s) are threadsafe.
+ */
+ private final Map<DirectoryProvider,PerDPResources> resourcesMap = new HashMap<DirectoryProvider,PerDPResources>();
+
+ /**
+ * copy of BatchedQueueingProcessor.sync
+ */
+ private boolean sync;
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ this.searchFactoryImp = searchFactoryImplementor;
+ this.sync = BatchedQueueingProcessor.isConfiguredAsSync( props );
+ for (DirectoryProvider dp : searchFactoryImplementor.getDirectoryProviders() ) {
+ PerDPResources resources = new PerDPResources( searchFactoryImplementor, dp );
+ resourcesMap.put( dp, resources );
+ }
+ }
+
+ public Runnable getProcessor(List<LuceneWork> queue) {
+ return new LuceneBackendQueueProcessor( queue, searchFactoryImp, resourcesMap, sync );
+ }
+
+ public void close() {
+ // needs to stop all used ThreadPools and cleanup locks
+ for (PerDPResources res : resourcesMap.values() ) {
+ res.shutdown();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/LuceneBackendQueueProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
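To make the contract around this factory concrete, the following is a minimal sketch of the lifecycle the engine drives it through; the engine wires this up itself, so the code below only illustrates the BackendQueueProcessorFactory contract with placeholder arguments:

import java.util.List;
import java.util.Properties;

import org.hibernate.search.backend.BackendQueueProcessorFactory;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory;
import org.hibernate.search.engine.SearchFactoryImplementor;

public class BackendLifecycleSketch {

    // searchFactory, props and queue are placeholders provided by the running engine
    void applyChanges(SearchFactoryImplementor searchFactory, Properties props, List<LuceneWork> queue) {
        BackendQueueProcessorFactory backend = new LuceneBackendQueueProcessorFactory();
        backend.initialize( props, searchFactory ); // once, at startup
        Runnable processor = backend.getProcessor( queue ); // once per unit of work
        processor.run(); // applies the queued changes to the affected Directories
        backend.close(); // once, at shutdown: stops the per-DP executors
    }
}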
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,122 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.lucene.index.IndexWriter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * A Runnable containing a unit of changes to be applied to a specific index.
+ * After creation, use addWork(LuceneWork) to fill the changes queue and then
+ * run it to apply all changes. After run() this object should be discarded.
+ * @see Runnable
+ * @see #addWork(LuceneWork)
+ * @author Sanne Grinovero
+ */
+class PerDPQueueProcessor implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+ private final Workspace workspace;
+ private final LuceneWorkVisitor worker;
+ private final ExecutorService executor;
+ private final boolean exclusiveIndexUsage;
+ private final List<LuceneWork> workOnWriter = new ArrayList<LuceneWork>();
+
+ // if any work needs batchmode, set corresponding flag to true:
+ private boolean batchmode = false;
+
+ /**
+ * @param resources All resources for the given DirectoryProvider are collected
+ * from this wrapping object.
+ */
+ public PerDPQueueProcessor(PerDPResources resources) {
+ this.worker = resources.getVisitor();
+ this.workspace = resources.getWorkspace();
+ this.executor = resources.getExecutor();
+ this.exclusiveIndexUsage = resources.isExclusiveIndexUsageEnabled();
+ }
+
+ /**
+ * Adds a LuceneWork to the internal queue; work can not be removed once added.
+ * @param work the work to add
+ */
+ public void addWork(LuceneWork work) {
+ if ( work.isBatch() ) {
+ batchmode = true;
+ log.debug( "Batch mode enabled" );
+ }
+ workOnWriter.add( work );
+ }
+
+ /**
+ * Do all workOnWriter on an IndexWriter.
+ */
+ public void run() {
+ if ( workOnWriter.isEmpty() ) {
+ return;
+ }
+ log.debug( "Opening an IndexWriter for update" );
+ IndexWriter indexWriter = workspace.getIndexWriter( batchmode );
+ try {
+ for (LuceneWork lw : workOnWriter) {
+ lw.getWorkDelegate( worker ).performWork( lw, indexWriter );
+ }
+ workspace.commitIndexWriter();
+ performOptimizations();
+ }
+ finally {
+ if ( ! exclusiveIndexUsage )
+ workspace.closeIndexWriter();
+ }
+ }
+
+ private void performOptimizations() {
+ //TODO next line is assuming the OptimizerStrategy will need an IndexWriter;
+ // would be nicer to have the strategy put an OptimizeWork on the queue,
+ // or just return "yes please" (true) to some method?
+ //FIXME will not have a chance to trigger when no "add" activity is done.
+ // this is correct until we enable modification counts for deletions too.
+ workspace.optimizerPhase();
+ }
+
+ /**
+ * Each PerDPQueueProcessor is owned by an Executor,
+ * which contains the threads allowed to execute this.
+ * @return the Executor which should run this Runnable.
+ */
+ public ExecutorService getOwningExecutor() {
+ return executor;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPResources.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPResources.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPResources.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,79 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;
+import org.hibernate.search.batchindexing.Executors;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+
+import java.util.concurrent.ExecutorService;
+
+/**
+ * Collects all resources needed to apply changes to one index;
+ * these are reused across several WorkQueues.
+ *
+ * @author Sanne Grinovero
+ */
+class PerDPResources {
+
+ private final ExecutorService executor;
+ private final LuceneWorkVisitor visitor;
+ private final Workspace workspace;
+ private final boolean exclusiveIndexUsage;
+
+ PerDPResources(SearchFactoryImplementor searchFactoryImp, DirectoryProvider<?> dp) {
+ workspace = new Workspace( searchFactoryImp, dp );
+ visitor = new LuceneWorkVisitor( workspace );
+ executor = Executors.newFixedThreadPool( 1, "Directory writer" );
+ exclusiveIndexUsage = searchFactoryImp.isExclusiveIndexUsageEnabled( dp );
+ }
+
+ public ExecutorService getExecutor() {
+ return executor;
+ }
+
+ public LuceneWorkVisitor getVisitor() {
+ return visitor;
+ }
+
+ public Workspace getWorkspace() {
+ return workspace;
+ }
+
+ public boolean isExclusiveIndexUsageEnabled() {
+ return exclusiveIndexUsage;
+ }
+
+ public void shutdown() {
+ //sets the index to be closed after all current jobs are processed:
+ if ( exclusiveIndexUsage ) {
+ executor.execute( new CloseIndexRunnable( workspace ) );
+ }
+ executor.shutdown();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDPResources.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDirectoryWorkProcessor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDirectoryWorkProcessor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDirectoryWorkProcessor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,42 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Interface used to implement the visitor pattern in combination
+ * with DpSelectionVisitor and DpSelectionDelegate to
+ * send LuceneWork to the appropriate queues, as defined
+ * by an IndexShardingStrategy.
+ *
+ * @author Sanne Grinovero
+ */
+public interface PerDirectoryWorkProcessor {
+
+ public void addWorkToDpProcessor(DirectoryProvider<?> dp, LuceneWork work) throws InterruptedException;
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/PerDirectoryWorkProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/QueueProcessors.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/QueueProcessors.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/QueueProcessors.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,117 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.FutureTask;
+
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Container used to split work by DirectoryProviders and execute
+ * them concurrently.
+ * @author Sanne Grinovero
+ */
+class QueueProcessors implements PerDirectoryWorkProcessor {
+
+ private final Map<DirectoryProvider, PerDPResources> resourcesMap;
+ private final Map<DirectoryProvider, PerDPQueueProcessor> dpProcessors = new HashMap<DirectoryProvider, PerDPQueueProcessor>();
+
+ QueueProcessors(Map<DirectoryProvider, PerDPResources> resourcesMap) {
+ this.resourcesMap = resourcesMap;
+ }
+
+ public void addWorkToDpProcessor(DirectoryProvider dp, LuceneWork work) {
+ if ( ! dpProcessors.containsKey( dp ) ) {
+ dpProcessors.put( dp, new PerDPQueueProcessor( resourcesMap.get( dp ) ) );
+ }
+ PerDPQueueProcessor processor = dpProcessors.get( dp );
+ processor.addWork ( work );
+ }
+
+ /**
+ * Runs all index modifications queued so far.
+ * @param sync when true this method blocks until all the work is done.
+ * @throws InterruptedException only relevant when sync is true.
+ */
+ void runAll(boolean sync) throws InterruptedException {
+ if ( sync ) {
+ runAllWaiting();
+ }
+ else {
+ runAllAsync();
+ }
+ }
+
+ /**
+ * Runs all PerDPQueueProcessors and does not wait for them to finish.
+ */
+ private void runAllAsync() {
+ // execute all work in parallel on each DirectoryProvider;
+ // each DP has its own ExecutorService.
+ for ( PerDPQueueProcessor process : dpProcessors.values() ) {
+ ExecutorService executor = process.getOwningExecutor();
+ executor.execute( process );
+ }
+ }
+
+ /**
+ * Runs all PerDPQueueProcessors and waits until all of them have been processed.
+ * @throws InterruptedException
+ */
+ private void runAllWaiting() throws InterruptedException {
+ List<Future<Object>> futures = new ArrayList<Future<Object>>( dpProcessors.size() );
+ // execute all work in parallel on each DirectoryProvider;
+ // each DP has its own ExecutorService.
+ for ( PerDPQueueProcessor process : dpProcessors.values() ) {
+ ExecutorService executor = process.getOwningExecutor();
+ //wrap each Runnable in a Future
+ FutureTask<Object> f = new FutureTask<Object>( process, null );
+ futures.add( f );
+ executor.execute( f );
+ }
+ // and then wait for all tasks to be finished:
+ for ( Future<Object> f : futures ) {
+ if ( !f.isDone() ) {
+ try {
+ f.get();
+ } catch(CancellationException ignore) {
+ //ignored, as in java.util.concurrent.AbstractExecutorService.invokeAll(Collection<Callable<T>> tasks)
+ } catch(ExecutionException ignore) {
+ //ignored, as in java.util.concurrent.AbstractExecutorService.invokeAll(Collection<Callable<T>> tasks)
+ }
+ }
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/QueueProcessors.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,121 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.ScopedAnalyzer;
+
+/**
+ * Stateless implementation that performs an <code>AddLuceneWork</code>.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ * @author Sanne Grinovero
+ * @see LuceneWorkVisitor
+ * @see LuceneWorkDelegate
+ */
+class AddWorkDelegate implements LuceneWorkDelegate {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final Workspace workspace;
+
+ AddWorkDelegate(Workspace workspace) {
+ this.workspace = workspace;
+ }
+
+ public void performWork(LuceneWork work, IndexWriter writer) {
+ final Class<?> entityType = work.getEntityClass();
+ @SuppressWarnings("unchecked")
+ DocumentBuilderIndexedEntity documentBuilder = workspace.getDocumentBuilder( entityType );
+ Map<String, String> fieldToAnalyzerMap = ( ( AddLuceneWork ) work ).getFieldToAnalyzerMap();
+ ScopedAnalyzer analyzer = ( ScopedAnalyzer ) documentBuilder.getAnalyzer();
+ analyzer = updateAnalyzerMappings( analyzer, fieldToAnalyzerMap );
+ if ( log.isTraceEnabled() ) {
+ log.trace(
+ "add to Lucene index: {}#{}:{}",
+ new Object[] { entityType, work.getId(), work.getDocument() }
+ );
+ }
+ try {
+ writer.addDocument( work.getDocument(), analyzer );
+ workspace.incrementModificationCounter( 1 );
+ }
+ catch ( IOException e ) {
+ throw new SearchException(
+ "Unable to add to Lucene index: "
+ + entityType + "#" + work.getId(), e
+ );
+ }
+ }
+
+ /**
+ * Allows overriding the otherwise static field-to-analyzer mapping in <code>scopedAnalyzer</code>.
+ *
+ * @param scopedAnalyzer The scoped analyzer created at startup time.
+ * @param fieldToAnalyzerMap A map of <code>Document</code> field names to analyzer names. This map gets created
+ * when the Lucene <code>Document</code> gets created and uses the state of the entity being indexed to determine analyzers
+ * dynamically at index time.
+ * @return <code>scopedAnalyzer</code> in case <code>fieldToAnalyzerMap</code> is <code>null</code> or empty. Otherwise
+ * a clone of <code>scopedAnalyzer</code> is created where the analyzers get overridden according to <code>fieldToAnalyzerMap</code>.
+ */
+ private ScopedAnalyzer updateAnalyzerMappings(ScopedAnalyzer scopedAnalyzer, Map<String, String> fieldToAnalyzerMap) {
+ // for backwards compatibility
+ if ( fieldToAnalyzerMap == null || fieldToAnalyzerMap.isEmpty() ) {
+ return scopedAnalyzer;
+ }
+
+ ScopedAnalyzer analyzerClone = scopedAnalyzer.clone();
+ for ( Map.Entry<String, String> entry : fieldToAnalyzerMap.entrySet() ) {
+ Analyzer analyzer = workspace.getAnalyzer( entry.getValue() );
+ if ( analyzer == null ) {
+ log.warn( "Unable to retrieve named analyzer: " + entry.getValue() );
+ }
+ else {
+ analyzerClone.addScopedAnalyzer( entry.getKey(), analyzer );
+ }
+ }
+ return analyzerClone;
+ }
+
+ public void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor) {
+ monitor.documentsAdded( 1 );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
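The fieldToAnalyzerMap handled above is what powers dynamic analyzer selection. Below is a minimal sketch of the discriminator side, assuming the @AnalyzerDiscriminator / Discriminator contract from the released API; the class name and analyzer definition names are made up:

import org.hibernate.search.analyzer.Discriminator;

public class LanguageDiscriminator implements Discriminator {

    // value is the state of the property carrying @AnalyzerDiscriminator, e.g. a language code
    public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
        if ( value == null ) {
            return null; // fall back to the statically mapped analyzer
        }
        return (String) value; // must match an @AnalyzerDef name, e.g. "en" or "de"
    }
}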
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,81 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import java.io.Serializable;
+
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Stateless extension of <code>DeleteWorkDelegate</code>,
+ * performing the same <code>DeleteLuceneWork</code> in an optimal way in case
+ * the index is NOT shared across different entities
+ * (which is the default).
+ *
+ * @author Sanne Grinovero
+ * @see DeleteWorkDelegate
+ */
+public class DeleteExtWorkDelegate extends DeleteWorkDelegate {
+
+ private final Class<?> managedType;
+ private final DocumentBuilderIndexedEntity<?> builder;
+ private final Logger log = LoggerFactory.make();
+
+ DeleteExtWorkDelegate(Workspace workspace) {
+ super( workspace );
+ managedType = workspace.getEntitiesInDirectory().iterator().next();
+ builder = workspace.getDocumentBuilder( managedType );
+ }
+
+ @Override
+ public void performWork(LuceneWork work, IndexWriter writer) {
+ checkType( work );
+ Serializable id = work.getId();
+ log.trace( "Removing {}#{} by id using an IndexWriter.", managedType, id );
+ Term idTerm = builder.getTerm( id );
+ try {
+ writer.deleteDocuments( idTerm );
+ }
+ catch ( Exception e ) {
+ String message = "Unable to remove " + managedType + "#" + id + " from index.";
+ throw new SearchException( message, e );
+ }
+ }
+
+ private void checkType(final LuceneWork work) {
+ if ( work.getEntityClass() != managedType ) {
+ throw new AssertionFailure( "Unexpected type" );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import java.io.Serializable;
+
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.TermQuery;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Stateless implementation that performs a <code>DeleteLuceneWork</code>.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ * @author Sanne Grinovero
+ * @see LuceneWorkVisitor
+ * @see LuceneWorkDelegate
+ */
+class DeleteWorkDelegate implements LuceneWorkDelegate {
+
+ private static final Logger log = LoggerFactory.make();
+ private final Workspace workspace;
+
+ DeleteWorkDelegate(Workspace workspace) {
+ this.workspace = workspace;
+ }
+
+ public void performWork(LuceneWork work, IndexWriter writer) {
+ final Class<?> entityType = work.getEntityClass();
+ final Serializable id = work.getId();
+ log.trace( "Removing {}#{} by query.", entityType, id );
+ DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder( entityType );
+
+ BooleanQuery entityDeletionQuery = new BooleanQuery();
+
+ TermQuery idQueryTerm = new TermQuery( builder.getTerm( id ) );
+ entityDeletionQuery.add( idQueryTerm, BooleanClause.Occur.MUST );
+
+ Term classNameQueryTerm = new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() );
+ TermQuery classNameQuery = new TermQuery( classNameQueryTerm );
+ entityDeletionQuery.add( classNameQuery, BooleanClause.Occur.MUST );
+
+ try {
+ writer.deleteDocuments( entityDeletionQuery );
+ }
+ catch ( Exception e ) {
+ String message = "Unable to remove " + entityType + "#" + id + " from index.";
+ throw new SearchException( message, e );
+ }
+ }
+
+ public void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor) {
+ // TODO Auto-generated method stub
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
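
The two delete delegates above differ only in how they address documents: when an index is shared by several entity types, DeleteWorkDelegate must combine the id term with the class name term, while DeleteExtWorkDelegate can delete by the id term alone. A minimal sketch of the two strategies against a raw IndexWriter follows; the "id" field name is an illustrative assumption (the real term comes from DocumentBuilderIndexedEntity.getTerm), while DocumentBuilder.CLASS_FIELDNAME is the constant used by the code above.

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.engine.DocumentBuilder;

class DeletionStrategiesSketch {

    // Shared index: constrain by id AND class name, as DeleteWorkDelegate does.
    void deleteFromSharedIndex(IndexWriter writer, String idInString, String entityName) throws Exception {
        BooleanQuery query = new BooleanQuery();
        query.add( new TermQuery( new Term( "id", idInString ) ), BooleanClause.Occur.MUST );
        query.add( new TermQuery( new Term( DocumentBuilder.CLASS_FIELDNAME, entityName ) ), BooleanClause.Occur.MUST );
        writer.deleteDocuments( query );
    }

    // Dedicated index: a single id term is enough, as DeleteExtWorkDelegate does.
    void deleteFromDedicatedIndex(IndexWriter writer, String idInString) throws Exception {
        writer.deleteDocuments( new Term( "id", idInString ) );
    }
}
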
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import org.apache.lucene.index.IndexWriter;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+
+/**
+ * @author Sanne Grinovero
+ */
+public interface LuceneWorkDelegate {
+
+ /**
+ * Will perform work on an IndexWriter.
+ * @param work the LuceneWork to apply to the IndexWriter.
+ * @param writer the IndexWriter to use.
+ * @throws UnsupportedOperationException when the work is not compatible with an IndexWriter.
+ */
+ void performWork(LuceneWork work, IndexWriter writer);
+
+ /**
+ * Used for stats and performance counters: use the monitor
+ * to keep track of the activity performed on the index.
+ * @param work the work which was done.
+ * @param monitor the monitor tracking activity.
+ */
+ void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkVisitor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkVisitor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkVisitor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,72 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
+import org.hibernate.search.backend.WorkVisitor;
+import org.hibernate.search.backend.Workspace;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class LuceneWorkVisitor implements WorkVisitor<LuceneWorkDelegate> {
+
+ private final AddWorkDelegate addDelegate;
+ private final DeleteWorkDelegate deleteDelegate;
+ private final OptimizeWorkDelegate optimizeDelegate;
+ private final PurgeAllWorkDelegate purgeAllDelegate;
+
+ public LuceneWorkVisitor(Workspace workspace) {
+ if ( workspace.getEntitiesInDirectory().size() == 1 ) {
+ this.deleteDelegate = new DeleteExtWorkDelegate( workspace );
+ }
+ else {
+ this.deleteDelegate = new DeleteWorkDelegate( workspace );
+ }
+ this.purgeAllDelegate = new PurgeAllWorkDelegate();
+ this.addDelegate = new AddWorkDelegate( workspace );
+ this.optimizeDelegate = new OptimizeWorkDelegate( workspace );
+ }
+
+ public LuceneWorkDelegate getDelegate(AddLuceneWork addLuceneWork) {
+ return addDelegate;
+ }
+
+ public LuceneWorkDelegate getDelegate(DeleteLuceneWork deleteLuceneWork) {
+ return deleteDelegate;
+ }
+
+ public LuceneWorkDelegate getDelegate(OptimizeLuceneWork optimizeLuceneWork) {
+ return optimizeDelegate;
+ }
+
+ public LuceneWorkDelegate getDelegate(PurgeAllLuceneWork purgeAllLuceneWork) {
+ return purgeAllDelegate;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkVisitor.java
___________________________________________________________________
Name: svn:keywords
+ Id
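
Taken together, the visitor above and the delegates it hands out implement a double dispatch over the LuceneWork hierarchy. A rough sketch of how a backend might drive it is shown below; it assumes LuceneWork exposes a getWorkDelegate(WorkVisitor) accept-style method (not shown in this diff), and the workspace, queue and writer are supplied by the caller.

import java.util.List;
import org.apache.lucene.index.IndexWriter;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.impl.lucene.works.LuceneWorkDelegate;
import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;

class WorkDispatchSketch {

    void applyQueue(Workspace workspace, List<LuceneWork> queue, IndexWriter writer) {
        LuceneWorkVisitor visitor = new LuceneWorkVisitor( workspace );
        for ( LuceneWork work : queue ) {
            // each concrete LuceneWork picks its own delegate, avoiding instanceof chains
            // (assumes LuceneWork.getWorkDelegate(WorkVisitor) exists)
            LuceneWorkDelegate delegate = work.getWorkDelegate( visitor );
            delegate.performWork( work, writer );
        }
    }
}
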
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexWriter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Stateless implementation that performs an <code>OptimizeLuceneWork</code>.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ * @author Sanne Grinovero
+ * @see LuceneWorkVisitor
+ * @see LuceneWorkDelegate
+ */
+class OptimizeWorkDelegate implements LuceneWorkDelegate {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final Workspace workspace;
+
+ OptimizeWorkDelegate(Workspace workspace) {
+ this.workspace = workspace;
+ }
+
+ public void performWork(LuceneWork work, IndexWriter writer) {
+ final Class<?> entityType = work.getEntityClass();
+ log.trace( "optimize Lucene index: {}", entityType );
+ try {
+ writer.optimize();
+ workspace.optimize();
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to optimize Lucene index: " + entityType, e );
+ }
+ }
+
+ public void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor) {
+ // TODO Auto-generated method stub
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.backend.impl.lucene.works;
+
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Stateless implementation that performs a <code>PurgeAllLuceneWork</code>.
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author John Griffin
+ * @author Sanne Grinovero
+ * @see LuceneWorkVisitor
+ * @see LuceneWorkDelegate
+ */
+class PurgeAllWorkDelegate implements LuceneWorkDelegate {
+
+ private static final Logger log = LoggerFactory.make();
+
+ PurgeAllWorkDelegate() {
+ }
+
+ public void performWork(LuceneWork work, IndexWriter writer) {
+ final Class<?> entityType = work.getEntityClass();
+ log.trace( "purgeAll Lucene index using IndexWriter for type: {}", entityType );
+ try {
+ Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() );
+ writer.deleteDocuments( term );
+ }
+ catch (Exception e) {
+ throw new SearchException( "Unable to purge all from Lucene index: " + entityType, e );
+ }
+ }
+
+ public void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor) {
+ // TODO Auto-generated method stub
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchCoordinator.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchCoordinator.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchCoordinator.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,160 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.hibernate.CacheMode;
+import org.hibernate.SessionFactory;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Makes sure that several different BatchIndexingWorkspace(s)
+ * can be started concurrently, sharing the same batch-backend
+ * and IndexWriters.
+ *
+ * @author Sanne Grinovero
+ */
+public class BatchCoordinator implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final Class<?>[] rootEntities; //entity types to reindex excluding all subtypes of each other
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final SessionFactory sessionFactory;
+ private final int objectLoadingThreads;
+ private final int collectionLoadingThreads;
+ private final CacheMode cacheMode;
+ private final int objectLoadingBatchSize;
+ private final boolean optimizeAtEnd;
+ private final boolean purgeAtStart;
+ private final boolean optimizeAfterPurge;
+ private final CountDownLatch endAllSignal;
+ private final MassIndexerProgressMonitor monitor;
+ private final int objectsLimit;
+
+ private BatchBackend backend;
+
+ public BatchCoordinator(Set<Class<?>> rootEntities,
+ SearchFactoryImplementor searchFactoryImplementor,
+ SessionFactory sessionFactory, int objectLoadingThreads,
+ int collectionLoadingThreads, CacheMode cacheMode,
+ int objectLoadingBatchSize, int objectsLimit,
+ boolean optimizeAtEnd,
+ boolean purgeAtStart, boolean optimizeAfterPurge,
+ MassIndexerProgressMonitor monitor) {
+ this.rootEntities = rootEntities.toArray( new Class<?>[ rootEntities.size() ] );
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.sessionFactory = sessionFactory;
+ this.objectLoadingThreads = objectLoadingThreads;
+ this.collectionLoadingThreads = collectionLoadingThreads;
+ this.cacheMode = cacheMode;
+ this.objectLoadingBatchSize = objectLoadingBatchSize;
+ this.optimizeAtEnd = optimizeAtEnd;
+ this.purgeAtStart = purgeAtStart;
+ this.optimizeAfterPurge = optimizeAfterPurge;
+ this.monitor = monitor;
+ this.objectsLimit = objectsLimit;
+ this.endAllSignal = new CountDownLatch( rootEntities.size() );
+ }
+
+ public void run() {
+ backend = searchFactoryImplementor.makeBatchBackend( monitor );
+ try {
+ beforeBatch(); // purgeAll and pre-optimize activities
+ doBatchWork();
+ backend.stopAndFlush( 60L*60*24, TimeUnit.SECONDS ); //1 day : enough to flush to indexes?
+// backend.stopAndFlush( 10, TimeUnit.SECONDS );
+ afterBatch();
+ } catch (InterruptedException e) {
+ log.error( "Batch indexing was interrupted" );
+ Thread.currentThread().interrupt();
+ }
+ finally {
+ backend.close();
+ }
+ }
+
+ /**
+ * Will spawn a thread for each type in rootEntities; they will all re-join
+ * on endAllSignal when finished.
+ * @throws InterruptedException if interrupted while waiting for endAllSignal.
+ */
+ private void doBatchWork() throws InterruptedException {
+ ExecutorService executor = Executors.newFixedThreadPool( rootEntities.length, "BatchIndexingWorkspace" );
+ for ( Class<?> type : rootEntities ) {
+ executor.execute( new BatchIndexingWorkspace(
+ searchFactoryImplementor, sessionFactory, type,
+ objectLoadingThreads, collectionLoadingThreads,
+ cacheMode, objectLoadingBatchSize,
+ endAllSignal, monitor, backend, objectsLimit ) );
+ }
+ executor.shutdown();
+ endAllSignal.await(); //waits for the executor to finish
+ }
+
+ /**
+ * Operations to perform after all subthreads have finished their work on the index
+ */
+ private void afterBatch() {
+ if ( this.optimizeAtEnd ) {
+ Set<Class<?>> targetedClasses = searchFactoryImplementor.getIndexedTypesPolymorphic( rootEntities );
+ optimize( targetedClasses );
+ }
+ }
+
+ /**
+ * Optional operations to perform before the indexing threads start
+ */
+ private void beforeBatch() {
+ if ( this.purgeAtStart ) {
+ //purgeAll for affected entities
+ Set<Class<?>> targetedClasses = searchFactoryImplementor.getIndexedTypesPolymorphic( rootEntities );
+ for ( Class<?> clazz : targetedClasses ) {
+ //needs to be in-sync work to make sure we wait for the end of it.
+ backend.doWorkInSync( new PurgeAllLuceneWork( clazz ) );
+ }
+ if ( this.optimizeAfterPurge ) {
+ optimize( targetedClasses );
+ }
+ }
+ }
+
+ private void optimize(Set<Class<?>> targetedClasses) {
+ for ( Class<?> clazz : targetedClasses ) {
+ //TODO the backend should remove duplicate optimize work to the same DirectoryProvider (as entities might share indexes)
+ backend.doWorkInSync( new OptimizeLuceneWork( clazz ) );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchCoordinator.java
___________________________________________________________________
Name: svn:keywords
+ Id
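
A minimal sketch of wiring the coordinator directly is shown below; in practice it is driven by the MassIndexer API rather than instantiated by user code, and the thread counts, batch size and monitor implementation are illustrative assumptions.

import java.util.HashSet;
import java.util.Set;
import org.hibernate.CacheMode;
import org.hibernate.SessionFactory;
import org.hibernate.search.batchindexing.BatchCoordinator;
import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
import org.hibernate.search.engine.SearchFactoryImplementor;

class BatchCoordinatorSketch {

    void reindex(Class<?> rootType, SearchFactoryImplementor searchFactory,
                 SessionFactory sessionFactory, MassIndexerProgressMonitor monitor) {
        Set<Class<?>> roots = new HashSet<Class<?>>();
        roots.add( rootType );
        BatchCoordinator coordinator = new BatchCoordinator(
                roots, searchFactory, sessionFactory,
                4,                 // objectLoadingThreads
                8,                 // collectionLoadingThreads
                CacheMode.IGNORE,  // avoid polluting the second level cache while reindexing
                100,               // objectLoadingBatchSize
                0,                 // objectsLimit: 0 means no limit
                true,              // optimizeAtEnd
                true,              // purgeAtStart
                true,              // optimizeAfterPurge
                monitor );
        coordinator.run(); // blocks until all root entities have been reindexed
    }
}
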
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchIndexingWorkspace.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchIndexingWorkspace.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchIndexingWorkspace.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,159 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ThreadPoolExecutor;
+
+import org.hibernate.CacheMode;
+import org.hibernate.SessionFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * This runnable will prepare a pipeline for batch indexing
+ * of entities, managing the lifecycle of several ThreadPools.
+ *
+ * @author Sanne Grinovero
+ */
+public class BatchIndexingWorkspace implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final SearchFactoryImplementor searchFactory;
+ private final SessionFactory sessionFactory;
+
+ //following order shows the 4 stages of an entity flowing to the index:
+ private final ThreadPoolExecutor execIdentifiersLoader;
+ private final ProducerConsumerQueue<List<Serializable>> fromIdentifierListToEntities;
+ private final ThreadPoolExecutor execFirstLoader;
+ private final ProducerConsumerQueue<Object> fromEntityToAddwork;
+ private final ThreadPoolExecutor execDocBuilding;
+
+ private final int objectLoadingThreadNum;
+ private final int luceneworkerBuildingThreadNum;
+ private final Class<?> indexedType;
+
+ // status control
+ private final CountDownLatch producerEndSignal; //released when we stop adding Documents to Index
+ private final CountDownLatch endAllSignal; //released when we release all locks and IndexWriter
+
+ // progress monitor
+ private final MassIndexerProgressMonitor monitor;
+
+ // loading options
+ private final CacheMode cacheMode;
+ private final int objectLoadingBatchSize;
+
+ private final BatchBackend backend;
+
+ private final int objectsLimit;
+
+ public BatchIndexingWorkspace(SearchFactoryImplementor searchFactoryImplementor, SessionFactory sessionFactory,
+ Class<?> entityType,
+ int objectLoadingThreads, int collectionLoadingThreads,
+ CacheMode cacheMode, int objectLoadingBatchSize,
+ CountDownLatch endAllSignal,
+ MassIndexerProgressMonitor monitor, BatchBackend backend,
+ int objectsLimit) {
+
+ this.indexedType = entityType;
+ this.searchFactory = searchFactoryImplementor;
+ this.sessionFactory = sessionFactory;
+
+ //thread pool sizing:
+ this.objectLoadingThreadNum = objectLoadingThreads;
+ this.luceneworkerBuildingThreadNum = collectionLoadingThreads;//collections are loaded as needed while building the documents
+
+ //loading options:
+ this.cacheMode = cacheMode;
+ this.objectLoadingBatchSize = objectLoadingBatchSize;
+ this.backend = backend;
+
+ //executors: (quite expensive constructor)
+ //execIdentifiersLoader has size 1 and is not configurable: ensures the list is consistent as produced by one transaction
+ this.execIdentifiersLoader = Executors.newFixedThreadPool( 1, "identifierloader" );
+ this.execFirstLoader = Executors.newFixedThreadPool( objectLoadingThreadNum, "entityloader" );
+ this.execDocBuilding = Executors.newFixedThreadPool( luceneworkerBuildingThreadNum, "collectionsloader" );
+
+ //pipelining queues:
+ this.fromIdentifierListToEntities = new ProducerConsumerQueue<List<Serializable>>( 1 );
+ this.fromEntityToAddwork = new ProducerConsumerQueue<Object>( objectLoadingThreadNum );
+
+ //end signal shared with other instances:
+ this.endAllSignal = endAllSignal;
+ this.producerEndSignal = new CountDownLatch( luceneworkerBuildingThreadNum );
+
+ this.monitor = monitor;
+ this.objectsLimit = objectsLimit;
+ }
+
+ public void run() {
+ try {
+
+ //first start the consumers, then the producers (reverse order):
+ for ( int i=0; i < luceneworkerBuildingThreadNum; i++ ) {
+ //from entity to LuceneWork:
+ execDocBuilding.execute( new EntityConsumerLuceneworkProducer(
+ fromEntityToAddwork, monitor,
+ sessionFactory, producerEndSignal, searchFactory,
+ cacheMode, backend) );
+ }
+ for ( int i=0; i < objectLoadingThreadNum; i++ ) {
+ //from primary key to loaded entity:
+ execFirstLoader.execute( new IdentifierConsumerEntityProducer(
+ fromIdentifierListToEntities, fromEntityToAddwork, monitor,
+ sessionFactory, cacheMode, indexedType) );
+ }
+ //from class definition to all primary keys:
+ execIdentifiersLoader.execute( new IdentifierProducer(
+ fromIdentifierListToEntities, sessionFactory,
+ objectLoadingBatchSize, indexedType, monitor,
+ objectsLimit ) );
+
+ //shutdown all executors:
+ execIdentifiersLoader.shutdown();
+ execFirstLoader.shutdown();
+ execDocBuilding.shutdown();
+ try {
+ producerEndSignal.await(); //wait for all work to be sent to the backend
+ log.debug( "All work for type {} has been produced", indexedType.getName() );
+ } catch (InterruptedException e) {
+ //restore interruption signal:
+ Thread.currentThread().interrupt();
+ throw new SearchException( "Interrupted on batch Indexing; index will be left in unknown state!", e );
+ }
+ }
+ finally {
+ endAllSignal.countDown();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/BatchIndexingWorkspace.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/EntityConsumerLuceneworkProducer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/EntityConsumerLuceneworkProducer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/EntityConsumerLuceneworkProducer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,140 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.io.Serializable;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+
+import org.hibernate.CacheMode;
+import org.hibernate.FlushMode;
+import org.hibernate.Hibernate;
+import org.hibernate.LockMode;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.bridge.TwoWayFieldBridge;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Component of batch-indexing pipeline, using chained producer-consumers.
+ * This Runnable will consume entities taken one-by-one from the queue
+ * and produce an AddLuceneWork for each entity, sending it to the batch backend.
+ *
+ * @author Sanne Grinovero
+ */
+public class EntityConsumerLuceneworkProducer implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final ProducerConsumerQueue<Object> source;
+ private final SessionFactory sessionFactory;
+ private final Map<Class<?>, DocumentBuilderIndexedEntity<?>> documentBuilders;
+ private final MassIndexerProgressMonitor monitor;
+
+ private static final int CLEAR_PERIOD = 50;
+ private final CacheMode cacheMode;
+
+ private final CountDownLatch producerEndSignal;
+
+ private final BatchBackend backend;
+
+ public EntityConsumerLuceneworkProducer(
+ ProducerConsumerQueue<Object> entitySource,
+ MassIndexerProgressMonitor monitor,
+ SessionFactory sessionFactory,
+ CountDownLatch producerEndSignal,
+ SearchFactoryImplementor searchFactory, CacheMode cacheMode, BatchBackend backend) {
+ this.source = entitySource;
+ this.monitor = monitor;
+ this.sessionFactory = sessionFactory;
+ this.producerEndSignal = producerEndSignal;
+ this.cacheMode = cacheMode;
+ this.backend = backend;
+ this.documentBuilders = searchFactory.getDocumentBuildersIndexedEntities();
+ }
+
+ public void run() {
+ Session session = sessionFactory.openSession();
+ session.setFlushMode( FlushMode.MANUAL );
+ session.setCacheMode( cacheMode );
+ try {
+ Transaction transaction = session.beginTransaction();
+ indexAllQueue( session );
+ transaction.commit();
+ }
+ finally {
+ producerEndSignal.countDown();
+ session.close();
+ }
+ log.debug( "finished" );
+ }
+
+ private void indexAllQueue(Session session) {
+ try {
+ for ( int cycle=0; true; cycle++ ) {
+ Object take = source.take();
+ if ( take == null ) {
+ break;
+ }
+ else {
+ log.trace( "received an object {}", take );
+ //trick to attach the objects to session:
+ session.lock( take, LockMode.NONE );
+ index( take, session );
+ monitor.documentsBuilt( 1 );
+ session.evict( take );
+ if ( cycle == CLEAR_PERIOD ) {
+ cycle = 0;
+ session.clear();
+ }
+ }
+ }
+ }
+ catch (InterruptedException e) {
+ // just quit
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void index( Object entity, Session session ) throws InterruptedException {
+ Serializable id = session.getIdentifier( entity );
+ Class clazz = Hibernate.getClass( entity );
+ DocumentBuilderIndexedEntity docBuilder = documentBuilders.get( clazz );
+ TwoWayFieldBridge idBridge = docBuilder.getIdBridge();
+ String idInString = idBridge.objectToString( id );
+ //depending on the complexity of the object graph going to be indexed it's possible
+ //that we hit the database several times during work construction.
+ AddLuceneWork addWork = docBuilder.createAddWork( clazz, entity, id, idInString, true );
+ backend.enqueueAsyncWork( addWork );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/EntityConsumerLuceneworkProducer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/Executors.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/Executors.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/Executors.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,101 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Helper class to create threads;
+ * these threads are grouped and named to be identified in a profiler.
+ *
+ * @author Sanne Grinovero
+ */
+public class Executors {
+
+ private static final String THREAD_GROUP_PREFIX = "Hibernate Search: ";
+ private static final int QUEUE_MAX_LENGTH = 1000; //TODO have it configurable?
+
+ /**
+ * Creates a new fixed size ThreadPoolExecutor.
+ * It uses a blocking queue of at most 1000 elements and the rejection
+ * policy is set to CallerRunsPolicy for the case the queue is full.
+ * These settings are required to cap the queue, to make sure the
+ * timeouts are reasonable for most jobs.
+ *
+ * @param threads the number of threads
+ * @param groupname a label to identify the threadpool; useful for profiling.
+ * @return the new ExecutorService
+ */
+ public static ThreadPoolExecutor newFixedThreadPool(int threads, String groupname) {
+ return newFixedThreadPool( threads, groupname, QUEUE_MAX_LENGTH );
+ }
+
+ /**
+ * Creates a new fixed size ThreadPoolExecutor
+ * @param threads the number of threads
+ * @param groupname a label to identify the threadpool; useful for profiling.
+ * @param queueSize the size of the queue to store Runnables when all threads are busy
+ * @return the new ExecutorService
+ */
+ public static ThreadPoolExecutor newFixedThreadPool(int threads, String groupname, int queueSize) {
+ return new ThreadPoolExecutor(
+ threads,
+ threads,
+ 0L, TimeUnit.MILLISECONDS,
+ new LinkedBlockingQueue<Runnable>( queueSize ),
+ new SearchThreadFactory( groupname ),
+ new ThreadPoolExecutor.CallerRunsPolicy() );
+ }
+
+ /**
+ * The thread factory, used to customize thread names
+ */
+ private static class SearchThreadFactory implements ThreadFactory {
+
+ final ThreadGroup group;
+ final AtomicInteger threadNumber = new AtomicInteger( 1 );
+ final String namePrefix;
+
+ SearchThreadFactory(String groupname) {
+ SecurityManager s = System.getSecurityManager();
+ group = ( s != null ) ? s.getThreadGroup() :
+ Thread.currentThread().getThreadGroup();
+ namePrefix = THREAD_GROUP_PREFIX + groupname + "-";
+ }
+
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread( group, r,
+ namePrefix + threadNumber.getAndIncrement(),
+ 0 );
+ return t;
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/Executors.java
___________________________________________________________________
Name: svn:keywords
+ Id
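
A short usage sketch of the helper above: when the bounded queue fills up, CallerRunsPolicy makes the submitting thread execute the task itself, which throttles producers instead of dropping work. The pool size and the task are illustrative.

import java.util.concurrent.ThreadPoolExecutor;
import org.hibernate.search.batchindexing.Executors;

class ExecutorsSketch {

    public static void main(String[] args) {
        ThreadPoolExecutor pool = Executors.newFixedThreadPool( 4, "example" );
        for ( int i = 0; i < 10; i++ ) {
            final int task = i;
            pool.execute( new Runnable() {
                public void run() {
                    // threads are named "Hibernate Search: example-1", "-2", ... by SearchThreadFactory
                    System.out.println( Thread.currentThread().getName() + " ran task " + task );
                }
            } );
        }
        pool.shutdown();
    }
}
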
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierConsumerEntityProducer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierConsumerEntityProducer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierConsumerEntityProducer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,138 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.hibernate.CacheMode;
+import org.hibernate.Criteria;
+import org.hibernate.FlushMode;
+import org.hibernate.LockMode;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * This Runnable is consuming entity identifiers and
+ * producing loaded detached entities for the next queue.
+ * It will finish when the queue it consumes from
+ * signals that there are no more identifiers.
+ *
+ * @author Sanne Grinovero
+ */
+public class IdentifierConsumerEntityProducer implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final ProducerConsumerQueue<List<Serializable>> source;
+ private final ProducerConsumerQueue<Object> destination;
+ private final SessionFactory sessionFactory;
+ private final CacheMode cacheMode;
+ private final Class<?> type;
+ private final MassIndexerProgressMonitor monitor;
+
+ public IdentifierConsumerEntityProducer(
+ ProducerConsumerQueue<List<Serializable>> fromIdentifierListToEntities,
+ ProducerConsumerQueue<Object> fromEntityToAddwork,
+ MassIndexerProgressMonitor monitor,
+ SessionFactory sessionFactory,
+ CacheMode cacheMode, Class<?> type) {
+ this.source = fromIdentifierListToEntities;
+ this.destination = fromEntityToAddwork;
+ this.monitor = monitor;
+ this.sessionFactory = sessionFactory;
+ this.cacheMode = cacheMode;
+ this.type = type;
+ log.trace( "created" );
+ }
+
+ public void run() {
+ log.trace( "started" );
+ Session session = sessionFactory.openSession();
+ session.setFlushMode( FlushMode.MANUAL );
+ session.setCacheMode( cacheMode );
+ try {
+ Transaction transaction = session.beginTransaction();
+ loadAllFromQueue( session );
+ transaction.commit();
+ }
+ finally {
+ session.close();
+ }
+ log.trace( "finished" );
+ }
+
+ private void loadAllFromQueue(Session session) {
+ try {
+ Object take;
+ do {
+ take = source.take();
+ if ( take != null ) {
+ @SuppressWarnings("unchecked")
+ List<Serializable> listIds = (List<Serializable>) take;
+ log.trace( "received list of ids {}", listIds );
+ loadList( listIds, session );
+ }
+ }
+ while ( take != null );
+ }
+ catch (InterruptedException e) {
+ // just quit
+ Thread.currentThread().interrupt();
+ }
+ finally {
+ destination.producerStopping();
+ }
+ }
+
+ /**
+ * Loads a list of entities of defined type using their identifiers.
+ * The loaded objects are then pushed to the next queue one by one.
+ * @param listIds the list of entity identifiers (of type Serializable) to load
+ * @param session the session to be used
+ * @throws InterruptedException
+ */
+ private void loadList(List<Serializable> listIds, Session session) throws InterruptedException {
+ //TODO investigate if I should use ObjectLoaderHelper.initializeObjects instead
+ Criteria criteria = session
+ .createCriteria( type )
+ .setCacheMode( cacheMode )
+ .setLockMode( LockMode.NONE )
+ .setCacheable( false )
+ .setFlushMode( FlushMode.MANUAL )
+ .add( Restrictions.in( "id", listIds ) );
+ List<?> list = criteria.list();
+ monitor.entitiesLoaded( list.size() );
+ session.clear();
+ for ( Object obj : list ) {
+ destination.put( obj );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierConsumerEntityProducer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierProducer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierProducer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierProducer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,162 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.Criteria;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.SessionFactory;
+import org.hibernate.StatelessSession;
+import org.hibernate.Transaction;
+import org.hibernate.criterion.Projections;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * This Runnable is going to feed the indexing queue
+ * with the identifiers of all the entities going to be indexed.
+ * This step in the indexing process is not parallel (should be
+ * done by one thread per type) so that a single transaction is used
+ * to define the group of entities to be indexed.
+ * Produced identifiers are put in the destination queue grouped in List
+ * instances: the reason for this is to load them in batches
+ * in the next step and reduce contention on the queue.
+ *
+ * @author Sanne Grinovero
+ */
+public class IdentifierProducer implements Runnable {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final ProducerConsumerQueue<List<Serializable>> destination;
+ private final SessionFactory sessionFactory;
+ private final int batchSize;
+ private final Class<?> indexedType;
+ private final MassIndexerProgressMonitor monitor;
+
+ private final int objectsLimit;
+
+ /**
+ * @param fromIdentifierListToEntities the target queue where the produced identifiers are sent to
+ * @param sessionFactory the Hibernate SessionFactory to use to load entities
+ * @param objectLoadingBatchSize affects mostly the next consumer: IdentifierConsumerEntityProducer
+ * @param indexedType the entity type to be loaded
+ * @param monitor to monitor indexing progress
+ * @param objectsLimit if not zero, caps the number of identifiers to be produced
+ */
+ public IdentifierProducer(
+ ProducerConsumerQueue<List<Serializable>> fromIdentifierListToEntities,
+ SessionFactory sessionFactory,
+ int objectLoadingBatchSize,
+ Class<?> indexedType, MassIndexerProgressMonitor monitor,
+ int objectsLimit) {
+ this.destination = fromIdentifierListToEntities;
+ this.sessionFactory = sessionFactory;
+ this.batchSize = objectLoadingBatchSize;
+ this.indexedType = indexedType;
+ this.monitor = monitor;
+ this.objectsLimit = objectsLimit;
+ log.trace( "created" );
+ }
+
+ public void run() {
+ log.trace( "started" );
+ try {
+ inTransactionWrapper();
+ }
+ finally{
+ destination.producerStopping();
+ }
+ log.trace( "finished" );
+ }
+
+ private void inTransactionWrapper() {
+ StatelessSession session = sessionFactory.openStatelessSession();
+ try {
+ Transaction transaction = session.beginTransaction();
+ loadAllIdentifiers( session );
+ transaction.commit();
+ } catch (InterruptedException e) {
+ // just quit
+ }
+ finally {
+ session.close();
+ }
+ }
+
+ private void loadAllIdentifiers(final StatelessSession session) throws InterruptedException {
+ Long totalCount = (Long) session
+ .createCriteria( indexedType )
+ .setProjection( Projections.count( "id" ) )
+ .setCacheable( false )
+ .uniqueResult();
+
+ if ( objectsLimit != 0 && objectsLimit < totalCount.intValue() ) {
+ totalCount = Long.valueOf( objectsLimit );
+ }
+ log.debug( "going to fetch {} primary keys", totalCount);
+ monitor.addToTotalCount( totalCount );
+
+ Criteria criteria = session
+ .createCriteria( indexedType )
+ .setProjection( Projections.id() )
+ .setCacheable( false )
+ .setFetchSize( 100 );
+
+ ScrollableResults results = criteria.scroll( ScrollMode.FORWARD_ONLY );
+ ArrayList<Serializable> destinationList = new ArrayList<Serializable>( batchSize );
+ int counter = 0;
+ try {
+ while ( results.next() ) {
+ Serializable id = (Serializable) results.get( 0 );
+ destinationList.add( id );
+ if ( destinationList.size() == batchSize ) {
+ enqueueList( destinationList );
+ destinationList = new ArrayList<Serializable>( batchSize );
+ }
+ counter++;
+ if ( counter == totalCount ) {
+ break;
+ }
+ }
+ }
+ finally {
+ results.close();
+ }
+ enqueueList( destinationList );
+ }
+
+ private void enqueueList(final List<Serializable> idsList) throws InterruptedException {
+ if ( ! idsList.isEmpty() ) {
+ destination.put( idsList );
+ log.trace( "produced a list of ids {}", idsList );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/IdentifierProducer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/MassIndexerProgressMonitor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/MassIndexerProgressMonitor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/MassIndexerProgressMonitor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+/**
+ * As a MassIndexer can take some time to finish its job,
+ * a MassIndexerProgressMonitor implementing this interface can be defined
+ * in the configuration property hibernate.search.worker.indexing.monitor
+ * to track indexing performance.
+ *
+ * Implementors must:
+ * - be threadsafe
+ * - have a no-arg constructor.
+ *
+ * @author Sanne Grinovero
+ */
+public interface MassIndexerProgressMonitor {
+
+ /**
+ * The number of documents sent to the backend;
+ * This is called several times during
+ * the indexing process.
+ * @param increment
+ */
+ void documentsAdded(long increment);
+
+ /**
+ * The number of Documents built;
+ * This is called several times and concurrently during
+ * the indexing process.
+ * @param number
+ */
+ void documentsBuilt(int number);
+
+ /**
+ * The number of entities loaded from database;
+ * This is called several times and concurrently during
+ * the indexing process.
+ * @param size
+ */
+ void entitiesLoaded(int size);
+
+ /**
+ * The total count of entities to be indexed is
+ * added here; It could be called more than once,
+ * the implementation should add them up.
+ * This is called several times and concurrently during
+ * the indexing process.
+ * @param count
+ */
+ void addToTotalCount(long count);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/MassIndexerProgressMonitor.java
___________________________________________________________________
Name: svn:keywords
+ Id
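
A minimal, thread-safe implementation of the interface above; the class name and the logging are illustrative, and it simply accumulates the counters reported concurrently by the indexing threads.

import java.util.concurrent.atomic.AtomicLong;
import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;

public class CountingProgressMonitor implements MassIndexerProgressMonitor {

    private final AtomicLong documentsAdded = new AtomicLong();
    private final AtomicLong documentsBuilt = new AtomicLong();
    private final AtomicLong entitiesLoaded = new AtomicLong();
    private final AtomicLong totalCount = new AtomicLong();

    public void documentsAdded(long increment) {
        long total = documentsAdded.addAndGet( increment );
        System.out.println( "documents added to the backend so far: " + total );
    }

    public void documentsBuilt(int number) {
        documentsBuilt.addAndGet( number );
    }

    public void entitiesLoaded(int size) {
        entitiesLoaded.addAndGet( size );
    }

    public void addToTotalCount(long count) {
        totalCount.addAndGet( count );
    }
}
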
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/ProducerConsumerQueue.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/ProducerConsumerQueue.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/ProducerConsumerQueue.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,112 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.batchindexing;
+
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implements a blocking queue capable of storing
+ * a "poison" token to signal consumer threads
+ * that the task is finished.
+ *
+ * @author Sanne Grinovero
+ */
+public class ProducerConsumerQueue<T> {
+
+ private static final int DEFAULT_BUFF_LENGTH = 1000;
+ private static final Object exitToken = new Object();
+
+ //doesn't use generics here as exitToken needs to be put in the queue too:
+ @SuppressWarnings("unchecked")
+ private final BlockingQueue queue;
+ private final AtomicInteger producersToWaitFor;
+
+ /**
+ * @param producersToWaitFor The number of producer threads.
+ */
+ public ProducerConsumerQueue( int producersToWaitFor ) {
+ this( DEFAULT_BUFF_LENGTH, producersToWaitFor );
+ }
+
+ @SuppressWarnings("unchecked")
+ public ProducerConsumerQueue( int queueLength, int producersToWaitFor ) {
+ queue = new ArrayBlockingQueue( queueLength );
+ this.producersToWaitFor = new AtomicInteger( producersToWaitFor );
+ }
+
+ /**
+ * Blocks until an object is available; when null
+ * is returned the client thread should quit.
+ * @return the next object in the queue, or null to exit
+ * @throws InterruptedException
+ */
+ @SuppressWarnings("unchecked")
+ public T take() throws InterruptedException {
+ Object obj = queue.take();
+ if ( obj == exitToken ) {
+ //restore exit signal for other threads
+ queue.put( exitToken );
+ return null;
+ }
+ else {
+ return (T)obj;
+ }
+ }
+
+ /**
+ * Adds a new object to the queue, blocking if no space is
+ * available.
+ * @param obj
+ * @throws InterruptedException
+ */
+ @SuppressWarnings("unchecked")
+ public void put(T obj) throws InterruptedException {
+ queue.put( obj );
+ }
+
+ /**
+ * Each producer thread should call producerStopping() when it has
+ * finished. After doing so it can safely terminate.
+ * After all producer threads have called producerStopping()
+ * a token will be inserted in the blocking queue to eventually
+ * awake sleeping consumers and have them quit, after the
+ * queue has been processed.
+ */
+ @SuppressWarnings("unchecked")
+ public void producerStopping() {
+ int activeProducers = producersToWaitFor.decrementAndGet();
+ //last producer must close consumers
+ if ( activeProducers == 0 ) {
+ try {
+ queue.put( exitToken );//awake all waiting threads to let them quit.
+ } catch (InterruptedException e) {
+ //just quit, consumers will be interrupted anyway if it's a shutdown.
+ }
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/batchindexing/ProducerConsumerQueue.java
___________________________________________________________________
Name: svn:keywords
+ Id
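For illustration only (not part of this revision), a minimal sketch of how a producer and a
consumer thread would use the queue above, relying on the null returned by take() as the exit
signal:

    import org.hibernate.search.batchindexing.ProducerConsumerQueue;

    public class QueueUsageSketch {
        public static void main(String[] args) throws InterruptedException {
            // a single producer will feed the queue
            final ProducerConsumerQueue<String> queue = new ProducerConsumerQueue<String>( 1 );

            Thread consumer = new Thread() {
                public void run() {
                    try {
                        String item;
                        // take() returns null once all producers have called producerStopping()
                        while ( ( item = queue.take() ) != null ) {
                            System.out.println( "consumed: " + item );
                        }
                    }
                    catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            };
            consumer.start();

            queue.put( "first" );
            queue.put( "second" );
            queue.producerStopping(); // last producer done: the poison token wakes the consumer
            consumer.join();
        }
    }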
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/BridgeFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/BridgeFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/BridgeFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,360 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.net.URI;
+import java.net.URL;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Calendar;
+
+import org.hibernate.AssertionFailure;
+import org.hibernate.HibernateException;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.bridge.builtin.StringBridge;
+import org.hibernate.search.bridge.builtin.CharacterBridge;
+import org.hibernate.search.bridge.builtin.DoubleBridge;
+import org.hibernate.search.bridge.builtin.FloatBridge;
+import org.hibernate.search.bridge.builtin.ShortBridge;
+import org.hibernate.search.bridge.builtin.IntegerBridge;
+import org.hibernate.search.bridge.builtin.LongBridge;
+import org.hibernate.search.bridge.builtin.BigIntegerBridge;
+import org.hibernate.search.bridge.builtin.BigDecimalBridge;
+import org.hibernate.search.bridge.builtin.BooleanBridge;
+import org.hibernate.search.bridge.builtin.UrlBridge;
+import org.hibernate.search.bridge.builtin.UriBridge;
+import org.hibernate.search.bridge.builtin.DateBridge;
+import org.hibernate.search.bridge.builtin.CalendarBridge;
+import org.hibernate.search.bridge.builtin.EnumBridge;
+
+/**
+ * This factory is responsible for creating and initializing built-in and custom <i>FieldBridges</i>.
+ *
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ */
+public class BridgeFactory {
+ private static Map<String, FieldBridge> builtInBridges = new HashMap<String, FieldBridge>();
+
+ private BridgeFactory() {
+ }
+
+ public static final TwoWayFieldBridge CHARACTER = new TwoWayString2FieldBridgeAdaptor( new CharacterBridge() );
+
+ public static final TwoWayFieldBridge DOUBLE = new TwoWayString2FieldBridgeAdaptor( new DoubleBridge() );
+
+ public static final TwoWayFieldBridge FLOAT = new TwoWayString2FieldBridgeAdaptor( new FloatBridge() );
+
+ public static final TwoWayFieldBridge SHORT = new TwoWayString2FieldBridgeAdaptor( new ShortBridge() );
+
+ public static final TwoWayFieldBridge INTEGER = new TwoWayString2FieldBridgeAdaptor( new IntegerBridge() );
+
+ public static final TwoWayFieldBridge LONG = new TwoWayString2FieldBridgeAdaptor( new LongBridge() );
+
+ public static final TwoWayFieldBridge BIG_INTEGER = new TwoWayString2FieldBridgeAdaptor( new BigIntegerBridge() );
+
+ public static final TwoWayFieldBridge BIG_DECIMAL = new TwoWayString2FieldBridgeAdaptor( new BigDecimalBridge() );
+
+ public static final TwoWayFieldBridge STRING = new TwoWayString2FieldBridgeAdaptor( new StringBridge() );
+
+ public static final TwoWayFieldBridge BOOLEAN = new TwoWayString2FieldBridgeAdaptor( new BooleanBridge() );
+
+ public static final TwoWayFieldBridge CLAZZ = new TwoWayString2FieldBridgeAdaptor( new org.hibernate.search.bridge.builtin.ClassBridge() );
+
+ public static final TwoWayFieldBridge Url = new TwoWayString2FieldBridgeAdaptor( new UrlBridge() );
+
+ public static final TwoWayFieldBridge Uri = new TwoWayString2FieldBridgeAdaptor( new UriBridge() );
+
+ public static final FieldBridge DATE_YEAR = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_YEAR );
+ public static final FieldBridge DATE_MONTH = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_MONTH );
+ public static final FieldBridge DATE_DAY = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_DAY );
+ public static final FieldBridge DATE_HOUR = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_HOUR );
+ public static final FieldBridge DATE_MINUTE = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_MINUTE );
+ public static final FieldBridge DATE_SECOND = new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_SECOND );
+
+ public static final FieldBridge CALENDAR_YEAR = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_YEAR );
+ public static final FieldBridge CALENDAR_MONTH = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_MONTH );
+ public static final FieldBridge CALENDAR_DAY = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_DAY );
+ public static final FieldBridge CALENDAR_HOUR = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_HOUR );
+ public static final FieldBridge CALENDAR_MINUTE = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_MINUTE );
+ public static final FieldBridge CALENDAR_SECOND = new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_SECOND );
+
+ public static final TwoWayFieldBridge DATE_MILLISECOND =
+ new TwoWayString2FieldBridgeAdaptor( DateBridge.DATE_MILLISECOND );
+
+ public static final TwoWayFieldBridge CALENDAR_MILLISECOND =
+ new TwoWayString2FieldBridgeAdaptor( CalendarBridge.CALENDAR_MILLISECOND );
+
+
+ static {
+ builtInBridges.put( Character.class.getName(), CHARACTER );
+ builtInBridges.put( char.class.getName(), CHARACTER );
+ builtInBridges.put( Double.class.getName(), DOUBLE );
+ builtInBridges.put( double.class.getName(), DOUBLE );
+ builtInBridges.put( Float.class.getName(), FLOAT );
+ builtInBridges.put( float.class.getName(), FLOAT );
+ builtInBridges.put( Short.class.getName(), SHORT );
+ builtInBridges.put( short.class.getName(), SHORT );
+ builtInBridges.put( Integer.class.getName(), INTEGER );
+ builtInBridges.put( int.class.getName(), INTEGER );
+ builtInBridges.put( Long.class.getName(), LONG );
+ builtInBridges.put( long.class.getName(), LONG );
+ builtInBridges.put( BigInteger.class.getName(), BIG_INTEGER );
+ builtInBridges.put( BigDecimal.class.getName(), BIG_DECIMAL );
+ builtInBridges.put( String.class.getName(), STRING );
+ builtInBridges.put( Boolean.class.getName(), BOOLEAN );
+ builtInBridges.put( boolean.class.getName(), BOOLEAN );
+ builtInBridges.put( Class.class.getName(), CLAZZ );
+ builtInBridges.put( URL.class.getName(), Url );
+ builtInBridges.put( URI.class.getName(), Uri );
+
+ builtInBridges.put( Date.class.getName(), DATE_MILLISECOND );
+ builtInBridges.put( Calendar.class.getName(), CALENDAR_MILLISECOND);
+ }
+
+ /**
+ * This extracts and instantiates the implementation class from a ClassBridge
+ * annotation.
+ *
+ * @param cb the ClassBridge
+ * @return FieldBridge
+ */
+ public static FieldBridge extractType(ClassBridge cb)
+ {
+ FieldBridge bridge = null;
+
+ if ( cb != null ) {
+ Class<?> impl = cb.impl();
+ //TODO better error information ( see guessType() )
+ if (impl != null) {
+ try {
+ Object instance = impl.newInstance();
+ if ( FieldBridge.class.isAssignableFrom( impl ) ) {
+ bridge = (FieldBridge) instance;
+ }
+ else if ( org.hibernate.search.bridge.TwoWayStringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new TwoWayString2FieldBridgeAdaptor(
+ (org.hibernate.search.bridge.TwoWayStringBridge) instance );
+ }
+ else if ( org.hibernate.search.bridge.StringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new String2FieldBridgeAdaptor( (org.hibernate.search.bridge.StringBridge) instance );
+ }
+ else {
+ throw new SearchException("@ClassBridge implementation implements none of the field bridge interfaces: "
+ + impl );
+ }
+ if ( cb.params().length > 0 && ParameterizedBridge.class.isAssignableFrom( impl ) ) {
+ Map<String, String> params = new HashMap<String, String>( cb.params().length );
+ for ( Parameter param : cb.params() ) {
+ params.put( param.name(), param.value() );
+ }
+ ( (ParameterizedBridge) instance ).setParameterValues( params );
+ }
+ }
+ catch (Exception e) {
+ throw new HibernateException( "Unable to instantiate ClassBridge for " + impl.getName(), e );
+ }
+ }
+ }
+ if ( bridge == null ) throw new SearchException( "Unable to guess FieldBridge for " + ClassBridge.class.getName() );
+
+ return bridge;
+ }
+
+ public static FieldBridge guessType(Field field, XMember member, ReflectionManager reflectionManager) {
+ FieldBridge bridge;
+ org.hibernate.search.annotations.FieldBridge bridgeAnn;
+ //@Field bridge has priority over @FieldBridge
+ if ( field != null && void.class != field.bridge().impl() ) {
+ bridgeAnn = field.bridge();
+ }
+ else {
+ bridgeAnn = member.getAnnotation( org.hibernate.search.annotations.FieldBridge.class );
+ }
+ final String memberName = member.getName();
+ if ( bridgeAnn != null ) {
+ bridge = doExtractType( bridgeAnn, memberName );
+ }
+ else if ( member.isAnnotationPresent( org.hibernate.search.annotations.DateBridge.class ) ) {
+ Resolution resolution = member.getAnnotation( org.hibernate.search.annotations.DateBridge.class ).resolution();
+ bridge = getDateField( resolution );
+ }
+ else if ( member.isAnnotationPresent( org.hibernate.search.annotations.CalendarBridge.class ) ) {
+ Resolution resolution = member.getAnnotation( org.hibernate.search.annotations.CalendarBridge.class ).resolution();
+ bridge = getCalendarField( resolution );
+ }
+ else {
+ //find in built-ins
+ XClass returnType = member.getType();
+ bridge = builtInBridges.get( returnType.getName() );
+ if ( bridge == null && returnType.isEnum() ) {
+ @SuppressWarnings( "unchecked" )
+ final Class<? extends Enum> enumClass = reflectionManager.toClass( returnType );
+ bridge = new TwoWayString2FieldBridgeAdaptor( new EnumBridge( enumClass ) );
+ }
+ }
+ //TODO add classname
+ if ( bridge == null ) throw new SearchException( "Unable to guess FieldBridge for " + memberName );
+ return bridge;
+ }
+
+ /** assumes bridgeAnn is not null */
+ private static FieldBridge doExtractType(org.hibernate.search.annotations.FieldBridge bridgeAnn, String memberName) {
+ assert bridgeAnn != null : "doExtractType assume bridge instance not null";
+ FieldBridge bridge;
+ Class impl = bridgeAnn.impl();
+ if (impl == void.class)
+ throw new SearchException("@FieldBridge with no implementation class defined in: " + memberName );
+ try {
+ Object instance = impl.newInstance();
+ if ( FieldBridge.class.isAssignableFrom( impl ) ) {
+ bridge = (FieldBridge) instance;
+ }
+ else if ( TwoWayStringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new TwoWayString2FieldBridgeAdaptor(
+ ( TwoWayStringBridge) instance );
+ }
+ else if ( org.hibernate.search.bridge.StringBridge.class.isAssignableFrom( impl ) ) {
+ bridge = new String2FieldBridgeAdaptor( (org.hibernate.search.bridge.StringBridge) instance );
+ }
+ else {
+ throw new SearchException("@FieldBridge implementation implements none of the field bridge interfaces: "
+ + impl + " in " + memberName
+ );
+ }
+ if ( bridgeAnn.params().length > 0 && ParameterizedBridge.class.isAssignableFrom( impl ) ) {
+ Map<String, String> params = new HashMap<String, String>( bridgeAnn.params().length );
+ for ( Parameter param : bridgeAnn.params() ) {
+ params.put( param.name(), param.value() );
+ }
+ ( (ParameterizedBridge) instance ).setParameterValues( params );
+ }
+ }
+ catch (Exception e) {
+ //TODO add classname
+ throw new SearchException( "Unable to instanciate FieldBridge for " + memberName, e );
+ }
+ return bridge;
+ }
+
+ public static FieldBridge getDateField(Resolution resolution) {
+ switch (resolution) {
+ case YEAR:
+ return DATE_YEAR;
+ case MONTH:
+ return DATE_MONTH;
+ case DAY:
+ return DATE_DAY;
+ case HOUR:
+ return DATE_HOUR;
+ case MINUTE:
+ return DATE_MINUTE;
+ case SECOND:
+ return DATE_SECOND;
+ case MILLISECOND:
+ return DATE_MILLISECOND;
+ default:
+ throw new AssertionFailure( "Unknown Resolution: " + resolution );
+ }
+ }
+
+
+ public static FieldBridge getCalendarField(Resolution resolution) {
+ switch (resolution) {
+ case YEAR:
+ return CALENDAR_YEAR;
+ case MONTH:
+ return CALENDAR_MONTH;
+ case DAY:
+ return CALENDAR_DAY;
+ case HOUR:
+ return CALENDAR_HOUR;
+ case MINUTE:
+ return CALENDAR_MINUTE;
+ case SECOND:
+ return CALENDAR_SECOND;
+ case MILLISECOND:
+ return CALENDAR_MILLISECOND;
+ default:
+ throw new AssertionFailure( "Unknown Resolution: " + resolution );
+ }
+ }
+
+ /**
+ * Takes in a FieldBridge and returns a TwoWayFieldBridge instance.
+ *
+ * @param fieldBridge
+ *
+ * @return a TwoWayFieldBridge instance if the Field Bridge is an instance of a TwoWayFieldBridge.
+ *
+ * @throws SearchException if the FieldBridge passed in is not an instance of a TwoWayFieldBridge.
+ */
+
+ public static TwoWayFieldBridge extractTwoWayType(org.hibernate.search.annotations.FieldBridge fieldBridge) {
+ FieldBridge fb = extractType( fieldBridge );
+ if ( fb instanceof TwoWayFieldBridge ) {
+ return ( TwoWayFieldBridge ) fb;
+ }
+ else {
+ throw new SearchException( "FieldBridge passed in is not an instance of " + TwoWayFieldBridge.class.getSimpleName() );
+ }
+ }
+
+ /**
+ * This extracts and instantiates the implementation class from a ClassBridge
+ * annotation.
+ *
+ * @param fieldBridgeAnnotation the FieldBridge annotation
+ *
+ * @return FieldBridge
+ */
+ public static FieldBridge extractType(org.hibernate.search.annotations.FieldBridge fieldBridgeAnnotation) {
+ FieldBridge bridge = null;
+
+ if ( fieldBridgeAnnotation != null ) {
+ bridge = doExtractType( fieldBridgeAnnotation, null );
+ }
+
+ if ( bridge == null ) {
+ throw new SearchException(
+ "Unable to guess FieldBridge for " + org.hibernate.search.annotations.FieldBridge.class.getName()
+ );
+ }
+
+ return bridge;
+ }
+
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/BridgeFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
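A small lookup sketch (illustrative only, not part of the commit) exercising the constants and
helpers defined above:

    import org.hibernate.search.annotations.Resolution;
    import org.hibernate.search.bridge.BridgeFactory;
    import org.hibernate.search.bridge.FieldBridge;
    import org.hibernate.search.bridge.TwoWayFieldBridge;

    public class BridgeLookupSketch {
        public static void main(String[] args) {
            // built-in two-way bridges convert property values to index strings (and back)
            TwoWayFieldBridge integerBridge = BridgeFactory.INTEGER;
            System.out.println( integerBridge.objectToString( Integer.valueOf( 42 ) ) ); // "42"

            // resolution-based date bridges are resolved through getDateField()
            FieldBridge dayBridge = BridgeFactory.getDateField( Resolution.DAY );
            System.out.println( dayBridge == BridgeFactory.DATE_DAY ); // true
        }
    }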
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/FieldBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/FieldBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/FieldBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * Link between a Java property and a Lucene Document.
+ * Usually a Java property will be linked to a Document Field.
+ * <p/>
+ * All implementations need to be threadsafe.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FieldBridge {
+
+ /**
+ * Manipulate the document to index the given value.
+ * <p/>
+ * A common implementation is to add a Field with the given {@code name} to {@code document} following
+ * the parameters {@code luceneOptions} if the {@code value} is not {@code null}.
+ *
+ * @param name The field to add to the Lucene document
+ * @param value The actual value to index
+ * @param document The Lucene document into which we want to index the value.
+ * @param luceneOptions Contains the parameters used for adding {@code value} to
+ * the Lucene document.
+ */
+ void set(String name, Object value, Document document, LuceneOptions luceneOptions);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/FieldBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
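For illustration, a hypothetical implementation honouring the contract above; the zero-padding
scheme and the class name are invented for the example:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.hibernate.search.bridge.FieldBridge;
    import org.hibernate.search.bridge.LuceneOptions;

    // indexes a numeric property zero-padded to a fixed width so that
    // lexicographic term order matches numeric order
    public class PaddedNumberFieldBridge implements FieldBridge {

        public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
            if ( value == null ) {
                return; // common convention: null values are simply not indexed
            }
            String padded = String.format( "%010d", ( (Number) value ).longValue() );
            Field field = new Field(
                    name, padded,
                    luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
            field.setBoost( luceneOptions.getBoost() );
            document.add( field );
        }
    }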
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/LuceneOptions.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/LuceneOptions.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/LuceneOptions.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Field;
+
+/**
+ * A wrapper for the Lucene parameters needed for indexing.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface LuceneOptions {
+ Field.Store getStore();
+
+ Field.Index getIndex();
+
+ Field.TermVector getTermVector();
+
+ /**
+ * @return the boost value. If <code>boost == null</code>, the default boost value
+ * 1.0 is returned.
+ */
+ Float getBoost();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/LuceneOptions.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/ParameterizedBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/ParameterizedBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/ParameterizedBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,41 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import java.util.Map;
+
+/**
+ * Allows parameter injection into a given bridge.
+ *
+ * Implementors need to be threadsafe, but the
+ * setParameterValues method doesn't need any
+ * guard as initialization is always safe.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ParameterizedBridge {
+ //TODO inject Properties? since the annotations cannot support Object attribute?
+ void setParameterValues(Map parameters);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/ParameterizedBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
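A hypothetical bridge combining this interface with the StringBridge contract added further below
in this revision; the "padding" parameter name and the default width are assumptions of the sketch:

    import java.util.Map;

    import org.hibernate.search.bridge.ParameterizedBridge;
    import org.hibernate.search.bridge.StringBridge;

    public class PaddedIntegerBridge implements StringBridge, ParameterizedBridge {

        private int padding = 5; // default, overridden through setParameterValues()

        public void setParameterValues(Map parameters) {
            String width = (String) parameters.get( "padding" );
            if ( width != null ) {
                padding = Integer.parseInt( width );
            }
        }

        public String objectToString(Object object) {
            if ( object == null ) {
                return ""; // the StringBridge contract asks for a non-null return value
            }
            StringBuilder padded = new StringBuilder( object.toString() );
            while ( padded.length() < padding ) {
                padded.insert( 0, '0' );
            }
            return padded.toString();
        }
    }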
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Bridge to use a StringBridge as a FieldBridge.
+ *
+ * @author Emmanuel Bernard
+ */
+public class String2FieldBridgeAdaptor implements FieldBridge {
+ private final StringBridge stringBridge;
+
+ public String2FieldBridgeAdaptor(StringBridge stringBridge) {
+ this.stringBridge = stringBridge;
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ String indexedString = stringBridge.objectToString( value );
+ //Do not add fields on empty strings, seems a sensible default in most situations
+ //TODO if Store, probably also save empty ones
+ if ( StringHelper.isNotEmpty( indexedString ) ) {
+ Field field = new Field( name, indexedString, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/String2FieldBridgeAdaptor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/StringBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/StringBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/StringBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+/**
+ * Transform an object into a string representation.
+ *
+ * All implementations are required to be threadsafe.
+ * Usually this is easily achieved by avoiding the use
+ * of class fields, unless they are either immutable
+ * or needed to store parameters.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface StringBridge {
+
+ /**
+ * Converts the object representation to a string.
+ *
+ * @param object The object to transform into a string representation.
+ * @return String representation of the given object to be stored in Lucene index. The return string must not be
+ * <code>null</code>. It can be empty though.
+ */
+ String objectToString(Object object);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/StringBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
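How such a bridge is typically attached to a property (an illustrative mapping only; it reuses the
hypothetical PaddedIntegerBridge sketched earlier and leaves out the persistence annotations):

    import org.hibernate.search.annotations.DocumentId;
    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.FieldBridge;
    import org.hibernate.search.annotations.Indexed;
    import org.hibernate.search.annotations.Parameter;

    @Indexed
    public class Room {

        @DocumentId
        private Long id;

        // the custom bridge converts the value before it reaches the index;
        // the "padding" parameter is handed to setParameterValues()
        @Field
        @FieldBridge(impl = PaddedIntegerBridge.class,
                params = @Parameter(name = "padding", value = "10"))
        private Integer capacity;
    }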
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayFieldBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayFieldBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayFieldBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,55 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+
+/**
+ * A <code>FieldBridge</code> able to convert the index representation back into an object without losing information.
+ * Any bridge expected to process a document id should implement this interface.
+ *
+ * @author Emmanuel Bernard
+ */
+// FIXME rework the interface inheritance; there are some common concepts with StringBridge
+public interface TwoWayFieldBridge extends FieldBridge {
+ /**
+ * Build the element object from the <code>Document</code>
+ *
+ * @param name field name
+ * @param document document
+ *
+ * @return the entity property value.
+ */
+ Object get(String name, Document document);
+
+ /**
+ * Convert the object representation to a string.
+ *
+ * @param object The object to index.
+ * @return string (index) representation of the specified object. Must not be <code>null</code>, but
+ * can be empty.
+ */
+ String objectToString(Object object);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayFieldBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,58 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+/**
+ * Bridge to use a TwoWayStringBridge as a TwoWayFieldBridge
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO use Generics to avoid double declaration of stringBridge
+public class TwoWayString2FieldBridgeAdaptor extends String2FieldBridgeAdaptor implements TwoWayFieldBridge {
+
+ private final TwoWayStringBridge stringBridge;
+
+ public TwoWayString2FieldBridgeAdaptor(TwoWayStringBridge stringBridge) {
+ super( stringBridge );
+ this.stringBridge = stringBridge;
+ }
+
+ public String objectToString(Object object) {
+ return stringBridge.objectToString( object );
+ }
+
+ public Object get(String name, Document document) {
+ Field field = document.getField( name );
+ if (field == null) {
+ return stringBridge.stringToObject( null );
+ }
+ else {
+ return stringBridge.stringToObject( field.stringValue() );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayString2FieldBridgeAdaptor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayStringBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayStringBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayStringBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,50 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge;
+
+/**
+ * <code>StringBridge</code> allowing a translation from the string representation back to the <code>Object</code>.
+ * <code>objectToString( stringToObject( string ) )</code> and <code>stringToObject( objectToString( object ) )</code>
+ * should be "idempotent". More precisely:
+ * <ul>
+ * <li><code>objectToString( stringToObject( string ) ).equals(string)</code>, for non <code>null</code> string.</li>
+ * <li><code>stringToObject( objectToString( object ) ).equals(object)</code>, for non <code>null</code> object. </li>
+ * </ul>
+ *
+ * As for all bridges, implementations must be threadsafe.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface TwoWayStringBridge extends StringBridge {
+
+ /**
+ * Convert the index string representation to an object.
+ *
+ * @param stringValue The index value.
+ * @return Takes the string representation from the Lucene index and transforms it back into the original
+ * <code>Object</code>.
+ */
+ Object stringToObject(String stringValue);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/TwoWayStringBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
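For illustration, a hypothetical two-way bridge satisfying the round-trip contract above; it uses
java.util.Currency since the ISO code identifies the instance uniquely:

    import java.util.Currency;

    import org.hibernate.search.bridge.TwoWayStringBridge;
    import org.hibernate.util.StringHelper;

    public class CurrencyBridge implements TwoWayStringBridge {

        public String objectToString(Object object) {
            return object == null ? null : ( (Currency) object ).getCurrencyCode();
        }

        public Object stringToObject(String stringValue) {
            if ( StringHelper.isEmpty( stringValue ) ) {
                return null;
            }
            return Currency.getInstance( stringValue );
        }
    }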
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,41 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import java.math.BigDecimal;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a BigDecimal element
+ *
+ * @author Emmanuel Bernard
+ */
+public class BigDecimalBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new BigDecimal( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigDecimalBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,43 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import java.math.BigInteger;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a <code>BigInteger</code> element.
+ *
+ * @author Emmanuel Bernard
+ */
+public class BigIntegerBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ return new BigInteger( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BigIntegerBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BooleanBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BooleanBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BooleanBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a boolean field
+ *
+ * @author Sylvain Vieujot
+ */
+public class BooleanBridge implements TwoWayStringBridge {
+
+ public Boolean stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty(stringValue) ) return null;
+ return Boolean.valueOf( stringValue );
+ }
+
+ public String objectToString(Object object) {
+ return object == null ?
+ null :
+ object.toString();
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/BooleanBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CalendarBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CalendarBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CalendarBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,94 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.util.StringHelper;
+import org.apache.lucene.document.DateTools;
+
+import java.util.Date;
+import java.util.Calendar;
+import java.util.Locale;
+import java.util.Map;
+import java.text.ParseException;
+
+public class CalendarBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static final String RESOLUTION_PARAMETER = "resolution";
+
+ private DateTools.Resolution resolution;
+ public static final TwoWayStringBridge CALENDAR_YEAR = new CalendarBridge( Resolution.YEAR );
+ public static final TwoWayStringBridge CALENDAR_MONTH = new CalendarBridge( Resolution.MONTH );
+ public static final TwoWayStringBridge CALENDAR_DAY = new CalendarBridge( Resolution.DAY );
+ public static final TwoWayStringBridge CALENDAR_HOUR = new CalendarBridge( Resolution.HOUR );
+ public static final TwoWayStringBridge CALENDAR_MINUTE = new CalendarBridge( Resolution.MINUTE );
+ public static final TwoWayStringBridge CALENDAR_SECOND = new CalendarBridge( Resolution.SECOND );
+ public static final TwoWayStringBridge CALENDAR_MILLISECOND = new CalendarBridge( Resolution.MILLISECOND );
+
+ public CalendarBridge() {
+ }
+
+ public CalendarBridge(Resolution resolution) {
+ this.resolution = DateResolutionUtil.getLuceneResolution( resolution);
+ }
+
+ public void setParameterValues(Map parameters) {
+ Object resolution = parameters.get( RESOLUTION_PARAMETER );
+ Resolution hibResolution;
+ if ( resolution instanceof String ) {
+ hibResolution = Resolution.valueOf( ( (String) resolution ).toUpperCase( Locale.ENGLISH ) );
+ }
+ else {
+ hibResolution = (Resolution) resolution;
+ }
+ this.resolution = DateResolutionUtil.getLuceneResolution( hibResolution );
+ }
+
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ try {
+ Date date = DateTools.stringToDate( stringValue );
+ Calendar calendar = Calendar.getInstance();
+ calendar.setTime( date );
+ return calendar;
+ } catch (ParseException e) {
+ throw new SearchException( "Unable to parse into calendar: " + stringValue, e );
+ }
+ }
+
+ public String objectToString(Object object) {
+ if (object == null) {
+ return null;
+ }
+ Calendar calendar = (Calendar)object;
+ return DateTools.dateToString(calendar.getTime(),resolution);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CalendarBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
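An illustrative use of the parameterized form (not part of the commit); the printed value depends
on the current date:

    import java.util.Calendar;
    import java.util.HashMap;
    import java.util.Map;

    import org.hibernate.search.bridge.builtin.CalendarBridge;

    public class CalendarBridgeSketch {
        public static void main(String[] args) {
            CalendarBridge bridge = new CalendarBridge();
            Map<String, String> params = new HashMap<String, String>();
            params.put( CalendarBridge.RESOLUTION_PARAMETER, "day" ); // resolved case-insensitively
            bridge.setParameterValues( params );
            // truncated to day resolution, e.g. "20100316"
            System.out.println( bridge.objectToString( Calendar.getInstance() ) );
        }
    }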
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CharacterBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CharacterBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CharacterBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a character element
+ *
+ * @author Davide D'Alto
+ */
+public class CharacterBridge implements org.hibernate.search.bridge.TwoWayStringBridge {
+
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ if ( stringValue.length() > 1 ) {
+ throw new IllegalArgumentException( "<" + stringValue + "> is not a char" );
+ }
+ return stringValue.charAt( 0 );
+ }
+
+ public String objectToString(Object object) {
+ return object == null
+ ? null
+ : object.toString();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/CharacterBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ClassBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ClassBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ClassBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,58 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.SearchException;
+import org.hibernate.util.StringHelper;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * Convert a Class back and forth
+ *
+ * @author Emmanuel Bernard
+ */
+public class ClassBridge implements TwoWayStringBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ else {
+ try {
+ return ReflectHelper.classForName( stringValue, ClassBridge.class );
+ }
+ catch (ClassNotFoundException e) {
+ throw new SearchException("Unable to deserialize Class: " + stringValue, e);
+ }
+ }
+ }
+
+ public String objectToString(Object object) {
+ return object == null ?
+ null :
+ ( (Class) object).getName();
+
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ClassBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,102 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import java.text.ParseException;
+import java.util.Date;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.document.DateTools;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Bridge a java.util.Date to a String, truncated to the given resolution.
+ * Dates are stored GMT based.
+ * <p/>
+ * i.e.
+ * Resolution.YEAR: yyyy
+ * Resolution.MONTH: yyyyMM
+ * Resolution.DAY: yyyyMMdd
+ * Resolution.HOUR: yyyyMMddHH
+ * Resolution.MINUTE: yyyyMMddHHmm
+ * Resolution.SECOND: yyyyMMddHHmmss
+ * Resolution.MILLISECOND: yyyyMMddHHmmssSSS
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO split into StringBridge and TwoWayStringBridge?
+public class DateBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static final TwoWayStringBridge DATE_YEAR = new DateBridge( Resolution.YEAR );
+ public static final TwoWayStringBridge DATE_MONTH = new DateBridge( Resolution.MONTH );
+ public static final TwoWayStringBridge DATE_DAY = new DateBridge( Resolution.DAY );
+ public static final TwoWayStringBridge DATE_HOUR = new DateBridge( Resolution.HOUR );
+ public static final TwoWayStringBridge DATE_MINUTE = new DateBridge( Resolution.MINUTE );
+ public static final TwoWayStringBridge DATE_SECOND = new DateBridge( Resolution.SECOND );
+ public static final TwoWayStringBridge DATE_MILLISECOND = new DateBridge( Resolution.MILLISECOND );
+
+ private DateTools.Resolution resolution;
+
+ public DateBridge() {
+ }
+
+ public DateBridge(Resolution resolution) {
+ this.resolution = DateResolutionUtil.getLuceneResolution(resolution);
+ }
+
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ try {
+ return DateTools.stringToDate( stringValue );
+ }
+ catch (ParseException e) {
+ throw new SearchException( "Unable to parse into date: " + stringValue, e );
+ }
+ }
+
+ public String objectToString(Object object) {
+ return object != null ?
+ DateTools.dateToString( (Date) object, resolution ) :
+ null;
+ }
+
+ public void setParameterValues(Map parameters) {
+ Object resolution = parameters.get( "resolution" );
+ Resolution hibResolution;
+ if ( resolution instanceof String ) {
+ hibResolution = Resolution.valueOf( ( (String) resolution ).toUpperCase( Locale.ENGLISH ) );
+ }
+ else {
+ hibResolution = (Resolution) resolution;
+ }
+ this.resolution = DateResolutionUtil.getLuceneResolution( hibResolution );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
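For illustration, a minimal round-trip sketch of the bridge added above, using only the DATE_DAY constant and the TwoWayStringBridge methods visible in this hunk (the printed values are examples):

    import java.util.Date;

    import org.hibernate.search.bridge.TwoWayStringBridge;
    import org.hibernate.search.bridge.builtin.DateBridge;

    public class DateBridgeExample {
        public static void main(String[] args) {
            // DAY resolution truncates the date to yyyyMMdd before indexing
            TwoWayStringBridge bridge = DateBridge.DATE_DAY;
            String indexed = bridge.objectToString( new Date() );     // e.g. "20100316"
            Date roundTrip = (Date) bridge.stringToObject( indexed ); // midnight GMT of that day
            System.out.println( indexed + " -> " + roundTrip );
        }
    }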
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateResolutionUtil.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateResolutionUtil.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateResolutionUtil.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.DateTools.Resolution;
+import org.hibernate.AssertionFailure;
+
+public class DateResolutionUtil {
+
+ private DateResolutionUtil() {}
+
+
+ public static Resolution getLuceneResolution(org.hibernate.search.annotations.Resolution hibResolution) {
+ Resolution resolution = null;
+ switch (hibResolution) {
+ case YEAR:
+ resolution = DateTools.Resolution.YEAR;
+ break;
+ case MONTH:
+ resolution = DateTools.Resolution.MONTH;
+ break;
+ case DAY:
+ resolution = DateTools.Resolution.DAY;
+ break;
+ case HOUR:
+ resolution = DateTools.Resolution.HOUR;
+ break;
+ case MINUTE:
+ resolution = DateTools.Resolution.MINUTE;
+ break;
+ case SECOND:
+ resolution = DateTools.Resolution.SECOND;
+ break;
+ case MILLISECOND:
+ resolution = DateTools.Resolution.MILLISECOND;
+ break;
+ default:
+ throw new AssertionFailure( "Unknown Resolution: " + hibResolution );
+ }
+ return resolution;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DateResolutionUtil.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DoubleBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DoubleBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DoubleBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a double element
+ *
+ * @author Emmanuel Bernard
+ */
+public class DoubleBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Double( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/DoubleBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/EnumBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/EnumBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/EnumBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.util.StringHelper;
+
+
+/**
+ * Map an Enum field
+ *
+ * @author Sylvain Vieujot
+ */
+public class EnumBridge implements TwoWayStringBridge {
+
+ private Class<? extends Enum> clazz = null;
+
+ /**
+ * @param clazz the class of the enum.
+ */
+ public EnumBridge(Class<? extends Enum> clazz) {
+ this.clazz = clazz;
+ }
+
+ public Enum<? extends Enum> stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return Enum.valueOf( clazz, stringValue );
+ }
+
+ public String objectToString(Object object) {
+ Enum e = (Enum) object;
+ return e != null ? e.name() : null;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/EnumBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
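A minimal sketch of the enum bridge above; the Status enum is hypothetical and exists only for the example:

    import org.hibernate.search.bridge.builtin.EnumBridge;

    public class EnumBridgeExample {
        // hypothetical enum, for illustration only
        enum Status { OPEN, CLOSED }

        public static void main(String[] args) {
            EnumBridge bridge = new EnumBridge( Status.class );
            String indexed = bridge.objectToString( Status.OPEN );  // "OPEN" (Enum.name())
            System.out.println( bridge.stringToObject( indexed ) ); // OPEN
        }
    }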
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/FloatBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/FloatBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/FloatBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a float element
+ *
+ * @author Emmanuel Bernard
+ */
+public class FloatBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Float( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/FloatBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/IntegerBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/IntegerBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/IntegerBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map an integer element
+ *
+ * @author Emmanuel Bernard
+ */
+public class IntegerBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Integer( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/IntegerBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/LongBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/LongBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/LongBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a long element
+ *
+ * @author Emmanuel Bernard
+ */
+public class LongBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Long( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/LongBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/NumberBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/NumberBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/NumberBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * Base class for numbers - integer, double, etc.
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class NumberBridge implements TwoWayStringBridge {
+ public String objectToString(Object object) {
+ return object != null ?
+ object.toString() :
+ null;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/NumberBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
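The numeric bridges in this hunk (Double, Float, Integer, Long, Short) all follow the same pattern: objectToString delegates to Object.toString, while stringToObject parses the value and returns null for empty input. A minimal sketch with IntegerBridge:

    import org.hibernate.search.bridge.builtin.IntegerBridge;

    public class NumberBridgeExample {
        public static void main(String[] args) {
            IntegerBridge bridge = new IntegerBridge();
            String indexed = bridge.objectToString( 42 );      // "42"
            Object parsed = bridge.stringToObject( indexed );  // Integer 42; null for empty or null input
            System.out.println( indexed + " -> " + parsed );
        }
    }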
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ShortBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ShortBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ShortBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import org.hibernate.util.StringHelper;
+
+/**
+ * Map a short element
+ *
+ * @author Emmanuel Bernard
+ */
+public class ShortBridge extends NumberBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) return null;
+ return new Short( stringValue );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/ShortBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/StringBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/StringBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/StringBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+/**
+ * Map a string element
+ *
+ * @author Emmanuel Bernard
+ */
+public class StringBridge implements org.hibernate.search.bridge.TwoWayStringBridge {
+ public Object stringToObject(String stringValue) {
+ return stringValue;
+ }
+
+ public String objectToString(Object object) {
+ return (String) object;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/StringBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UriBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UriBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UriBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,59 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.hibernate.util.StringHelper;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * Bridge for <code>URI</code>
+ *
+ * @author Emmanuel Bernard
+ */
+public class UriBridge implements TwoWayStringBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ else {
+ try {
+ return new URI( stringValue );
+ }
+ catch (URISyntaxException e) {
+ throw new SearchException( "Unable to build URI: " + stringValue, e );
+ }
+ }
+ }
+
+ public String objectToString(Object object) {
+ return object == null ?
+ null :
+ object.toString();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UriBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UrlBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UrlBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UrlBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,59 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.bridge.builtin;
+
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.SearchException;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Bridge for <code>URL</code>s.
+ *
+ * @author Emmanuel Bernard
+ */
+public class UrlBridge implements TwoWayStringBridge {
+ public Object stringToObject(String stringValue) {
+ if ( StringHelper.isEmpty( stringValue ) ) {
+ return null;
+ }
+ else {
+ try {
+ return new URL( stringValue );
+ }
+ catch ( MalformedURLException e ) {
+ throw new SearchException( "Unable to build URL: " + stringValue, e );
+ }
+ }
+ }
+
+ public String objectToString(Object object) {
+ return object == null ?
+ null :
+ object.toString();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/bridge/builtin/UrlBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
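UriBridge and UrlBridge share the same shape: null or empty strings map to null, anything else is parsed, and a malformed value is wrapped in a SearchException. A minimal sketch for the URL variant:

    import java.net.URL;

    import org.hibernate.search.bridge.builtin.UrlBridge;

    public class UrlBridgeExample {
        public static void main(String[] args) {
            UrlBridge bridge = new UrlBridge();
            URL url = (URL) bridge.stringToObject( "http://www.hibernate.org" );
            System.out.println( bridge.objectToString( url ) ); // "http://www.hibernate.org"
            // a value such as "not a url" would raise SearchException
        }
    }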
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/AnalyzerDefMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/AnalyzerDefMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/AnalyzerDefMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,76 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.apache.solr.analysis.TokenFilterFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class AnalyzerDefMapping {
+ private SearchMapping mapping;
+ private Map<String, Object> analyzerDef;
+ private Map<String, Object> tokenizer;
+
+ AnalyzerDefMapping(String name, Class<? extends TokenizerFactory> tokenizerFactory, SearchMapping mapping) {
+ this.mapping = mapping;
+ this.analyzerDef = new HashMap<String, Object>();
+ mapping.addAnalyzerDef(analyzerDef);
+ analyzerDef.put( "name", name );
+ tokenizer = new HashMap<String, Object>();
+ tokenizer.put( "factory", tokenizerFactory );
+ analyzerDef.put( "tokenizer", tokenizer );
+ }
+
+ /**
+ * @TokenizerDef(... params={@Parameter(name="name", value="value"), ...})
+ */
+ public AnalyzerDefMapping tokenizerParam(String name, String value) {
+ Map<String, Object> param = SearchMapping.addElementToAnnotationArray(tokenizer, "params");
+ param.put("name", name);
+ param.put("value", value);
+ return this;
+ }
+
+ /**
+ * @TokenFilterDef(factory=factory)
+ */
+ public TokenFilterDefMapping filter(Class<? extends TokenFilterFactory> factory) {
+ return new TokenFilterDefMapping(factory, analyzerDef, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType,mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/AnalyzerDefMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
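A minimal sketch of defining an analyzer through the programmatic mapping API. The analyzerDef(...) entry point on SearchMapping is assumed from the wider fluent API (its source is not in this hunk); the Solr factories are the ones commonly used with Hibernate Search 3.x:

    import org.apache.solr.analysis.LowerCaseFilterFactory;
    import org.apache.solr.analysis.StandardTokenizerFactory;

    import org.hibernate.search.cfg.SearchMapping;

    public class AnalyzerDefExample {
        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            // programmatic equivalent of an @AnalyzerDef with a StandardTokenizerFactory tokenizer
            // and a LowerCaseFilterFactory token filter
            mapping.analyzerDef( "lowercase", StandardTokenizerFactory.class )
                    .filter( LowerCaseFilterFactory.class );
            return mapping;
        }
    }

The resulting SearchMapping would typically be handed to Hibernate through the hibernate.search.model_mapping configuration property.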
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/CalendarBridgeMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/CalendarBridgeMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/CalendarBridgeMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.Resolution;
+
+public class CalendarBridgeMapping {
+
+ private final SearchMapping mapping;
+ private final Map<String, Object> resolution;
+ private EntityDescriptor entity;
+ private PropertyDescriptor property;
+
+ public CalendarBridgeMapping(SearchMapping mapping,EntityDescriptor entity,PropertyDescriptor property, Resolution resolution) {
+ if (resolution == null) {
+ throw new SearchException("Resolution required in order to index calendar property");
+ }
+ this.mapping = mapping;
+ this.resolution = new HashMap<String, Object>();
+ this.entity = entity;
+ this.property = property;
+ this.resolution.put("resolution", resolution);
+ property.setCalendarBridge(this.resolution);
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/CalendarBridgeMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
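A sketch of wiring a Calendar property to DAY resolution through the fluent API. Meeting is a hypothetical entity; entity(...) on SearchMapping, property(...) on IndexedMapping and calendarBridge(...) on PropertyMapping are assumed from the wider fluent API and are not part of this hunk, while field() is the method defined above:

    import java.lang.annotation.ElementType;
    import java.util.Calendar;

    import org.hibernate.search.annotations.Resolution;
    import org.hibernate.search.cfg.SearchMapping;

    public class CalendarBridgeExample {
        // hypothetical entity, for illustration only
        public static class Meeting {
            Long id;
            Calendar start;
        }

        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            mapping.entity( Meeting.class ).indexed()
                    .property( "start", ElementType.FIELD )
                    .calendarBridge( Resolution.DAY ) // passing null would raise the SearchException above
                    .field();
            return mapping;
        }
    }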
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ClassBridgeMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ClassBridgeMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ClassBridgeMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,134 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+
+public class ClassBridgeMapping {
+
+ private final SearchMapping mapping;
+ private final EntityDescriptor entity;
+ private final Map<String, Object> classBridge;
+ private final EntityMapping entityMapping;
+
+
+ public ClassBridgeMapping(SearchMapping mapping, EntityDescriptor entity, Class<?> impl, EntityMapping entityMapping) {
+ this.mapping = mapping;
+ this.entity = entity;
+ this.entityMapping = entityMapping;
+ this.classBridge = new HashMap<String,Object>();
+ entity.addClassBridgeDef(classBridge);
+ if (impl != null) {
+ this.classBridge.put("impl", impl);
+ }
+
+ }
+
+ public ClassBridgeMapping name(String name) {
+ this.classBridge.put("name", name);
+ return this;
+ }
+
+ public ClassBridgeMapping store(Store store) {
+ this.classBridge.put("store", store);
+ return this;
+ }
+
+ public ClassBridgeMapping index(Index index) {
+ this.classBridge.put("index", index);
+ return this;
+ }
+
+ public ClassBridgeMapping termVector(TermVector termVector) {
+ this.classBridge.put("termVector", termVector);
+ return this;
+ }
+
+ public ClassBridgeMapping boost(float boost) {
+ final Map<String, Object> boostAnn = new HashMap<String, Object>();
+ boostAnn.put( "value", boost );
+ classBridge.put( "boost", boostAnn );
+ return this;
+ }
+
+ public ClassBridgeMapping analyzer(Class<?> analyzerClass) {
+ final Map<String, Object> analyzer = new HashMap<String, Object>();
+ analyzer.put( "impl", analyzerClass );
+ classBridge.put( "analyzer", analyzer );
+ return this;
+ }
+
+ public ClassBridgeMapping analyzer(String analyzerDef) {
+ final Map<String, Object> analyzer = new HashMap<String, Object>();
+ analyzer.put( "definition", analyzerDef );
+ classBridge.put( "analyzer", analyzer );
+ return this;
+ }
+
+
+ public ClassBridgeMapping param(String name, String value) {
+ Map<String, Object> param = SearchMapping.addElementToAnnotationArray(classBridge, "params");
+ param.put("name", name);
+ param.put("value", value);
+ return this;
+ }
+
+
+ public ClassBridgeMapping classBridge(Class<?> impl) {
+ return new ClassBridgeMapping(mapping, entity,impl,entityMapping );
+ }
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(mapping,name, impl);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public ProvidedIdMapping providedId() {
+ return new ProvidedIdMapping(mapping,entity);
+ }
+
+ public IndexedMapping indexed() {
+ return new IndexedMapping(mapping, entity, entityMapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ClassBridgeMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
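A sketch of registering a class-level bridge programmatically. The entity(...) entry point on SearchMapping is assumed from the wider fluent API; the rest of the chain (classBridge, name, index, store, param, indexed) is defined in this hunk, and Departments and CatFieldsClassBridge are hypothetical stand-ins:

    import org.hibernate.search.annotations.Index;
    import org.hibernate.search.annotations.Store;
    import org.hibernate.search.bridge.StringBridge;
    import org.hibernate.search.cfg.SearchMapping;

    public class ClassBridgeMappingExample {
        // hypothetical entity and bridge, for illustration only
        public static class Departments { Long id; String network; String branch; }
        public static class CatFieldsClassBridge implements StringBridge {
            public String objectToString(Object object) {
                // simplified: a real bridge would combine several properties using the "sepChar" param
                return String.valueOf( object );
            }
        }

        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            mapping.entity( Departments.class )
                    .classBridge( CatFieldsClassBridge.class )
                    .name( "branchnetwork" )
                    .index( Index.TOKENIZED )
                    .store( Store.YES )
                    .param( "sepChar", " " )
                    .indexed();
            return mapping;
        }
    }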
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ConcatStringBridge.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ConcatStringBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ConcatStringBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.Map;
+
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.StringBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ConcatStringBridge implements StringBridge, ParameterizedBridge{
+ public static final String SIZE = "size";
+ private int size;
+
+ public String objectToString(Object object) {
+ if (object == null) return "";
+ if ( ! (object instanceof String) ) {
+ throw new RuntimeException( "not a string" );
+ }
+ String string = object.toString();
+ int maxSize = string.length() >= size ? size : string.length();
+ return string.substring( 0, maxSize );
+ }
+
+ public void setParameterValues(Map parameters) {
+ size = Integer.valueOf( (String) parameters.get( SIZE ) );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ConcatStringBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
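ConcatStringBridge simply truncates a string to the length given by its "size" parameter. A minimal sketch using only the API shown above:

    import java.util.HashMap;
    import java.util.Map;

    import org.hibernate.search.cfg.ConcatStringBridge;

    public class ConcatStringBridgeExample {
        public static void main(String[] args) {
            ConcatStringBridge bridge = new ConcatStringBridge();
            Map<String, String> params = new HashMap<String, String>();
            params.put( ConcatStringBridge.SIZE, "4" );
            bridge.setParameterValues( params );                         // ParameterizedBridge contract
            System.out.println( bridge.objectToString( "Hibernate" ) ); // "Hibe"
        }
    }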
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ContainedInMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ContainedInMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ContainedInMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+public class ContainedInMapping {
+
+ private final SearchMapping mapping;
+ private final PropertyDescriptor property;
+ private final EntityDescriptor entity;
+
+ public ContainedInMapping(SearchMapping mapping,PropertyDescriptor property, EntityDescriptor entity) {
+ this.mapping = mapping;
+ this.property = property;
+ this.entity = entity;
+ Map<String, Object> containedIn = new HashMap<String, Object>();
+ property.setContainedIn(containedIn);
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ContainedInMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
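A sketch of marking the inverse side of an association with containedIn through the fluent API. Author and Book are hypothetical; entity(...) on SearchMapping and containedIn() on PropertyMapping are assumed from the wider fluent API, while property(...) on EntityMapping is defined elsewhere in this commit:

    import java.lang.annotation.ElementType;
    import java.util.Set;

    import org.hibernate.search.cfg.SearchMapping;

    public class ContainedInExample {
        // hypothetical entities, for illustration only
        public static class Author { Long id; String name; Set<Book> books; }
        public static class Book   { Long id; String title; Author author; }

        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            // Author.books points back to the Book documents that embed this author;
            // the Book side would declare the matching indexEmbedded on its "author" property
            mapping.entity( Author.class )
                    .property( "books", ElementType.FIELD )
                    .containedIn();
            return mapping;
        }
    }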
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DateBridgeMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DateBridgeMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DateBridgeMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.Resolution;
+
+public class DateBridgeMapping {
+
+ private final SearchMapping mapping;
+ private final Map<String, Object> resolution;
+ private EntityDescriptor entity;
+ private PropertyDescriptor property;
+
+ public DateBridgeMapping(SearchMapping mapping,EntityDescriptor entity,PropertyDescriptor property, Resolution resolution) {
+ if (resolution == null) {
+ throw new SearchException("Resolution required in order to index date property");
+ }
+ this.mapping = mapping;
+ this.resolution = new HashMap<String, Object>();
+ this.entity = entity;
+ this.property = property;
+ this.resolution.put("resolution", resolution);
+ property.setDateBridge(this.resolution);
+ }
+
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DateBridgeMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
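Analogous to the calendar variant above, a sketch of attaching a date bridge with MINUTE resolution. Order is a hypothetical entity; entity(...) on SearchMapping, property(...) on IndexedMapping and dateBridge(...) on PropertyMapping are assumed from the wider fluent API, while field() is defined above:

    import java.lang.annotation.ElementType;
    import java.util.Date;

    import org.hibernate.search.annotations.Resolution;
    import org.hibernate.search.cfg.SearchMapping;

    public class DateBridgeMappingExample {
        // hypothetical entity, for illustration only
        public static class Order { Long id; Date created; }

        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            mapping.entity( Order.class ).indexed()
                    .property( "created", ElementType.FIELD )
                    .dateBridge( Resolution.MINUTE )
                    .field();
            return mapping;
        }
    }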
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DocumentIdMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DocumentIdMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DocumentIdMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DocumentIdMapping {
+ private final SearchMapping mapping;
+ private final EntityDescriptor entity;
+ private final PropertyDescriptor property;
+ private final Map<String, Object> documentId = new HashMap<String, Object>();
+
+ public DocumentIdMapping(PropertyDescriptor property, EntityDescriptor entity, SearchMapping mapping) {
+ this.mapping = mapping;
+ this.entity = entity;
+ this.property = property;
+ property.setDocumentId( documentId );
+ }
+
+ public DocumentIdMapping name(String fieldName) {
+ documentId.put( "name", fieldName );
+ return this;
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/DocumentIdMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
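A sketch of overriding the document id field name through the fluent API. Address is a hypothetical entity; documentId(), field() and the indexed().property(...) steps are assumed from the wider fluent API, while name(...) and the follow-up property(...) are the methods defined above:

    import java.lang.annotation.ElementType;

    import org.hibernate.search.cfg.SearchMapping;

    public class DocumentIdExample {
        // hypothetical entity, for illustration only
        public static class Address { Long addressId; String city; }

        public static SearchMapping build() {
            SearchMapping mapping = new SearchMapping();
            mapping.entity( Address.class ).indexed()
                    .property( "addressId", ElementType.FIELD )
                    .documentId().name( "id" )
                    .property( "city", ElementType.FIELD )
                    .field();
            return mapping;
        }
    }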
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityDescriptor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityDescriptor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityDescriptor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,169 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class EntityDescriptor {
+ private Class<?> entityType;
+ private Map<String, Object> indexed;
+ private Map<PropertyKey, PropertyDescriptor> properties = new HashMap<PropertyKey, PropertyDescriptor>();
+ private Map<String, Object> similarity;
+ private Map<String, Object> boost;
+ private Map<String, Object> analyzerDiscriminator;
+ private Set<Map<String, Object>> fullTextFilterDefs = new HashSet<Map<String, Object>>();
+ private Map<String,Object> providedId;
+ private Set<Map<String,Object>> classBridges = new HashSet<Map<String,Object>>();
+ private Map<String, Object> dynamicBoost;
+
+ public Map<String, Object> getIndexed() {
+ return indexed;
+ }
+
+ public EntityDescriptor(Class<?> entityType) {
+ this.entityType = entityType;
+ }
+
+ public void setIndexed(Map<String, Object> indexed) {
+ this.indexed = indexed;
+ }
+
+ PropertyDescriptor getProperty(String name, ElementType type) {
+ PropertyKey propertyKey = new PropertyKey( name, type );
+ PropertyDescriptor descriptor = properties.get( propertyKey );
+ if (descriptor == null) {
+ descriptor = new PropertyDescriptor(name, type);
+ properties.put(propertyKey, descriptor);
+ }
+ return descriptor;
+ }
+
+ public PropertyDescriptor getPropertyDescriptor(String name, ElementType type) {
+ return properties.get( new PropertyKey( name, type ) );
+ }
+
+ public void setSimilariy(Map<String, Object> similarity) {
+ this.similarity = similarity;
+ }
+
+ public Map<String, Object> getSimilarity() {
+ return similarity;
+ }
+
+ public void setBoost(Map<String, Object> boost) {
+ this.boost = boost;
+ }
+
+ public Map<String, Object> getBoost() {
+ return boost;
+ }
+
+ public void setAnalyzerDiscriminator(Map<String, Object> analyzerDiscriminator) {
+ this.analyzerDiscriminator = analyzerDiscriminator;
+ }
+
+ public Map<String, Object> getAnalyzerDiscriminator() {
+ return analyzerDiscriminator;
+ }
+
+ public Set<Map<String, Object>> getFullTextFilterDefs() {
+ return fullTextFilterDefs;
+ }
+
+ public void addFulltextFilterDef(Map<String, Object> fullTextFilterDef) {
+ fullTextFilterDefs.add(fullTextFilterDef);
+ }
+
+
+ public void addClassBridgeDef(Map<String,Object> classBridge) {
+ classBridges.add(classBridge);
+ }
+
+ public Set<Map<String, Object>> getClassBridgeDefs() {
+ return classBridges;
+ }
+
+ public void setProvidedId(Map<String, Object> providedId) {
+ this.providedId = providedId;
+ }
+
+ public Map<String, Object> getProvidedId() {
+ return this.providedId;
+ }
+
+ public void setDynamicBoost(Map<String, Object> dynamicEntityBoost) {
+ this.dynamicBoost = dynamicEntityBoost;
+ }
+
+ public Map<String, Object> getDynamicBoost() {
+ return this.dynamicBoost;
+ }
+
+ private static class PropertyKey {
+ private String name;
+ private ElementType type;
+
+ PropertyKey(String name, ElementType type) {
+ this.name = name;
+ this.type = type;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if ( this == o ) {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() ) {
+ return false;
+ }
+
+ PropertyKey property = ( PropertyKey ) o;
+
+ if ( name != null ? !name.equals( property.name ) : property.name != null ) {
+ return false;
+ }
+ if ( type != property.type ) {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = name != null ? name.hashCode() : 0;
+ result = 31 * result + ( type != null ? type.hashCode() : 0 );
+ return result;
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityDescriptor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,101 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.analyzer.Discriminator;
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class EntityMapping {
+ private SearchMapping mapping;
+ private EntityDescriptor entity;
+
+ public EntityMapping(Class<?> entityType, SearchMapping mapping) {
+ this.mapping = mapping;
+ entity = mapping.getEntity(entityType);
+ }
+
+ public IndexedMapping indexed() {
+ return new IndexedMapping(mapping,entity, this);
+ }
+
+ public EntityMapping similarity(Class<?> impl) {
+ Map<String, Object> similarity = new HashMap<String, Object>(1);
+ similarity.put( "impl", impl );
+ entity.setSimilariy(similarity);
+ return this;
+ }
+
+ public EntityMapping boost(float boost) {
+ final Map<String, Object> boostAnn = new HashMap<String, Object>();
+ boostAnn.put( "value", boost );
+ entity.setBoost(boostAnn);
+ return this;
+ }
+
+
+ public EntityMapping dynamicBoost(Class<? extends BoostStrategy> impl) {
+ final Map<String, Object> dynamicBoost = new HashMap<String, Object>();
+ dynamicBoost.put("impl", impl);
+ entity.setDynamicBoost(dynamicBoost);
+ return this;
+ }
+
+ public EntityMapping analyzerDiscriminator(Class<? extends Discriminator> discriminator) {
+ final Map<String, Object> discriminatorAnn = new HashMap<String, Object>();
+ discriminatorAnn.put( "impl", discriminator );
+ entity.setAnalyzerDiscriminator(discriminatorAnn);
+ return this;
+ }
+
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(mapping,name, impl);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public ClassBridgeMapping classBridge(Class<?> impl) {
+ return new ClassBridgeMapping(mapping, entity, impl, this);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/EntityMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
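
For illustration, a usage sketch of the entity-level fluent API added above. The Book class and its "title" property are hypothetical stand-ins for a mapped domain class; DefaultSimilarity comes from Lucene and DefaultBoostStrategy is added later in this commit.

import java.lang.annotation.ElementType;

import org.apache.lucene.search.DefaultSimilarity;

import org.hibernate.search.cfg.SearchMapping;
import org.hibernate.search.engine.DefaultBoostStrategy;

public class BookMappingSketch {
    static class Book { }  // hypothetical entity, stands in for a mapped domain class

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Book.class )                      // EntityMapping entry point
                .indexed()                                // marks the entity as indexed
                        .indexName( "books" )             // explicit index name
                .similarity( DefaultSimilarity.class )    // class-level similarity implementation
                .boost( 2.0f )                            // class-level boost
                .dynamicBoost( DefaultBoostStrategy.class )
                .property( "title", ElementType.METHOD )
                        .field();                         // index the title property
        return mapping;
    }
}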
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldBridgeMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldBridgeMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldBridgeMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,115 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.lang.annotation.ElementType;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.TermVector;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FieldBridgeMapping {
+ private final SearchMapping mapping;
+ private final EntityDescriptor entity;
+ private final PropertyDescriptor property;
+ private final FieldMapping fieldMapping;
+ private final Map<String, Object> bridge = new HashMap<String, Object>();
+
+ public FieldBridgeMapping(Class<?> impl, Map<String, Object> field,
+ FieldMapping fieldMapping,
+ PropertyDescriptor property,
+ EntityDescriptor entity,
+ SearchMapping mapping) {
+ this.mapping = mapping;
+ this.entity = entity;
+ this.property = property;
+ this.fieldMapping = fieldMapping;
+ bridge.put( "impl", impl );
+ field.put( "bridge", bridge );
+ }
+
+ public FieldBridgeMapping param(String name, String value) {
+ Map<String, Object> param = SearchMapping.addElementToAnnotationArray(bridge, "params");
+ param.put("name", name);
+ param.put("value", value);
+ return this;
+ }
+
+ //FieldMapping level
+ public FieldMapping name(String fieldName) {
+ return fieldMapping.name( fieldName );
+ }
+
+ public FieldMapping store(Store store) {
+ return fieldMapping.store( store );
+ }
+
+ public FieldMapping index(Index index) {
+ return fieldMapping.index( index );
+ }
+
+ public FieldMapping termVector(TermVector termVector) {
+ return fieldMapping.termVector( termVector );
+ }
+
+ public FieldMapping boost(float boost) {
+ return fieldMapping.boost( boost );
+ }
+
+ public FieldMapping analyzer(Class<?> analyzerClass) {
+ return fieldMapping.analyzer( analyzerClass );
+ }
+
+ public FieldMapping analyzer(String analyzerDef) {
+ return fieldMapping.analyzer( analyzerDef );
+ }
+
+ //PropertyMapping level
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ //EntityMapping level
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ //Global level
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldBridgeMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
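
For illustration, a sketch of attaching a field bridge with parameters through the FieldBridgeMapping API above. Item, its "length" property and PaddedIntegerBridge are hypothetical; a real bridge would typically implement org.hibernate.search.bridge.StringBridge or FieldBridge.

import java.lang.annotation.ElementType;

import org.hibernate.search.cfg.SearchMapping;

public class FieldBridgeSketch {
    static class Item { }                 // hypothetical entity
    static class PaddedIntegerBridge { }  // hypothetical bridge implementation

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Item.class )
                .indexed()
                .property( "length", ElementType.FIELD )
                        .field()
                                .name( "paddedLength" )
                                .bridge( PaddedIntegerBridge.class )  // bridge implementation for this field
                                        .param( "padding", "10" );    // bridge parameter (name/value pair)
        return mapping;
    }
}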
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,122 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FieldMapping {
+ private final SearchMapping mapping;
+ private final EntityDescriptor entity;
+ private final PropertyDescriptor property;
+ private final Map<String, Object> field = new HashMap<String, Object>();
+
+ public FieldMapping(PropertyDescriptor property, EntityDescriptor entity, SearchMapping mapping) {
+ this.mapping = mapping;
+ this.entity = entity;
+ this.property = property;
+ property.addField(field);
+ }
+
+ public FieldMapping name(String fieldName) {
+ field.put( "name", fieldName );
+ return this;
+ }
+
+ public FieldMapping store(Store store) {
+ field.put( "store", store );
+ return this;
+ }
+
+ public FieldMapping index(Index index) {
+ field.put( "index", index );
+ return this;
+ }
+
+ public FieldMapping termVector(TermVector termVector) {
+ field.put( "termVector", termVector );
+ return this;
+ }
+
+ public FieldMapping boost(float boost) {
+ final Map<String, Object> boostAnn = new HashMap<String, Object>();
+ boostAnn.put( "value", boost );
+ field.put( "boost", boostAnn );
+ return this;
+ }
+
+ public FieldBridgeMapping bridge(Class<?> impl) {
+ return new FieldBridgeMapping( impl, field, this, property, entity, mapping );
+ }
+
+ public FieldMapping analyzer(Class<?> analyzerClass) {
+ final Map<String, Object> analyzer = new HashMap<String, Object>();
+ analyzer.put( "impl", analyzerClass );
+ field.put( "analyzer", analyzer );
+ return this;
+ }
+
+ public FieldMapping analyzer(String analyzerDef) {
+ final Map<String, Object> analyzer = new HashMap<String, Object>();
+ analyzer.put( "definition", analyzerDef );
+ field.put( "analyzer", analyzer );
+ return this;
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public DateBridgeMapping dateBridge(Resolution resolution) {
+ return new DateBridgeMapping(mapping, entity, property, resolution);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public CalendarBridgeMapping calendarBridge(Resolution resolution) {
+ return new CalendarBridgeMapping(mapping,entity,property, resolution);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FieldMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
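
For illustration, a sketch of the per-field options exposed by FieldMapping above (store, index, term vector, boost, analyzer). Essay and its "summary" property are hypothetical, and "customAnalyzer" refers to an analyzer definition assumed to be declared elsewhere via analyzerDef.

import java.lang.annotation.ElementType;

import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Store;
import org.hibernate.search.annotations.TermVector;
import org.hibernate.search.cfg.SearchMapping;

public class FieldOptionsSketch {
    static class Essay { }  // hypothetical entity

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Essay.class )
                .indexed()
                .property( "summary", ElementType.METHOD )
                        .field()
                                .store( Store.YES )            // keep the value in the index
                                .index( Index.TOKENIZED )      // analyze the value
                                .termVector( TermVector.YES )
                                .boost( 1.5f )
                                .analyzer( "customAnalyzer" )  // analyzer definition referenced by name
                        .field()
                                .name( "summary_forSort" )
                                .index( Index.UN_TOKENIZED );  // second, unanalyzed field on the same property
        return mapping;
    }
}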
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FullTextFilterDefMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FullTextFilterDefMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FullTextFilterDefMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.annotations.FilterCacheModeType;
+
+/**
+ * Mapping class responsible for collecting data for constructing
+ * the FullTextFilterDef annotation.
+ */
+public class FullTextFilterDefMapping {
+
+ private final SearchMapping mapping;
+ private final Map<String,Object> fullTextFilterDef;
+
+ public FullTextFilterDefMapping(SearchMapping mapping, String name, Class<?> impl) {
+ this.mapping = mapping;
+ this.fullTextFilterDef =new HashMap<String, Object>();
+ this.fullTextFilterDef.put("name", name);
+ this.fullTextFilterDef.put("impl", impl);
+ mapping.addFulltextFilterDef(fullTextFilterDef);
+ }
+
+ /**
+ * Set the cache mode for the full-text filter definition mapping
+ * @param cache the filter cache mode
+ * @return FullTextFilterDefMapping
+ */
+ public FullTextFilterDefMapping cache(FilterCacheModeType cache) {
+ this.fullTextFilterDef.put("cache", cache);
+ return this;
+ }
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(mapping, name, impl);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/FullTextFilterDefMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
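
For illustration, a sketch of declaring a full-text filter definition programmatically. BestDriversFilter is hypothetical (a real implementation would typically extend org.apache.lucene.search.Filter or expose a factory method), and the INSTANCE_ONLY constant is assumed to exist on the FilterCacheModeType enum.

import org.hibernate.search.annotations.FilterCacheModeType;
import org.hibernate.search.cfg.SearchMapping;

public class FilterDefSketch {
    static class BestDriversFilter { }  // hypothetical filter implementation

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.fullTextFilterDef( "bestDriver", BestDriversFilter.class )
                .cache( FilterCacheModeType.INSTANCE_ONLY );  // assumed cache mode constant
        return mapping;
    }
}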
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexEmbeddedMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexEmbeddedMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexEmbeddedMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,79 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+public class IndexEmbeddedMapping {
+
+ private final SearchMapping mapping;
+ private final Map<String,Object> indexEmbedded;
+ private EntityDescriptor entity;
+ private PropertyDescriptor property;
+
+ public IndexEmbeddedMapping(SearchMapping mapping, PropertyDescriptor property, EntityDescriptor entity) {
+ this.mapping = mapping;
+ this.indexEmbedded = new HashMap<String, Object>();
+ this.property = property;
+ this.entity = entity;
+ this.property.setIndexEmbedded(indexEmbedded);
+ }
+
+ public IndexEmbeddedMapping prefix(String prefix) {
+ this.indexEmbedded.put("prefix",prefix);
+ return this;
+ }
+
+ public IndexEmbeddedMapping targetElement(Class<?> targetElement) {
+ this.indexEmbedded.put("targetElement",targetElement);
+ return this;
+ }
+
+ public IndexEmbeddedMapping depth(int depth) {
+ this.indexEmbedded.put("depth", depth);
+ return this;
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexEmbeddedMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
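
For illustration, a sketch of embedding an association into the owner's index with a prefix and a depth limit, plus the matching containedIn on the other side. Order, Item and their "items"/"order" properties are hypothetical.

import java.lang.annotation.ElementType;

import org.hibernate.search.cfg.SearchMapping;

public class IndexEmbeddedSketch {
    static class Order { }  // hypothetical owning entity with an "items" association
    static class Item { }   // hypothetical associated entity with an "order" back-reference

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Order.class )
                .indexed()
                .property( "items", ElementType.FIELD )
                        .indexEmbedded()
                                .prefix( "item_" )           // embedded fields are prefixed with item_
                                .targetElement( Item.class )
                                .depth( 1 )                  // stop embedding after one level
                .entity( Item.class )
                        .property( "order", ElementType.FIELD )
                                .containedIn();              // keep the Order index up to date when an Item changes
        return mapping;
    }
}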
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexedMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexedMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexedMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+import org.hibernate.search.analyzer.Discriminator;
+
+public class IndexedMapping {
+
+ private final SearchMapping mapping;
+ private final EntityDescriptor entity;
+ private final Map<String, Object> indexed;
+ private final EntityMapping entityMapping;
+
+ public IndexedMapping(SearchMapping mapping, EntityDescriptor entity, EntityMapping entityMapping) {
+ this.entityMapping = entityMapping;
+ this.mapping = mapping;
+ this.entity = entity;
+ indexed = new HashMap<String, Object>();
+ entity.setIndexed(indexed);
+ }
+
+ public EntityMapping indexName(String indexName) {
+ this.indexed.put("index", indexName);
+ return entityMapping;
+ }
+
+ public IndexedMapping similarity(Class<?> impl) {
+ Map<String, Object> similarity = new HashMap<String, Object>(1);
+ similarity.put( "impl", impl );
+ entity.setSimilariy(similarity);
+ return this;
+ }
+
+ public IndexedMapping boost(float boost) {
+ final Map<String, Object> boostAnn = new HashMap<String, Object>();
+ boostAnn.put( "value", boost );
+ entity.setBoost(boostAnn);
+ return this;
+ }
+
+ public IndexedMapping analyzerDiscriminator(Class<? extends Discriminator> discriminator) {
+ final Map<String, Object> discriminatorAnn = new HashMap<String, Object>();
+ discriminatorAnn.put( "impl", discriminator );
+ entity.setAnalyzerDiscriminator(discriminatorAnn);
+ return this;
+ }
+
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(mapping, name, impl);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public ProvidedIdMapping providedId() {
+ return new ProvidedIdMapping(mapping,entity);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/IndexedMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyDescriptor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyDescriptor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyDescriptor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,116 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class PropertyDescriptor {
+ private ElementType type;
+ private String name;
+ private Collection<Map<String, Object>> fields = new ArrayList<Map<String, Object>>();
+ private Map<String, Object> dateBridge= new HashMap<String, Object>();
+ private Map<String, Object> calendarBridge= new HashMap<String, Object>();
+ private Map<String,Object> indexEmbedded;
+ private Map<String,Object> containedIn;
+
+ private Map<String, Object> documentId;
+ private Map<String, Object> analyzerDiscriminator;
+ private Map<String, Object> dynamicBoost;
+
+ public PropertyDescriptor(String name, ElementType type) {
+ this.name = name;
+ this.type = type;
+ }
+
+ public void setDocumentId(Map<String, Object> documentId) {
+ this.documentId = documentId;
+ }
+
+ public void addField(Map<String, Object> field) {
+ fields.add( field );
+ }
+
+ public void setDateBridge(Map<String,Object> dateBridge) {
+ this.dateBridge = dateBridge;
+ }
+ public void setCalendarBridge(Map<String,Object> calendarBridge) {
+ this.calendarBridge = calendarBridge;
+ }
+
+ public Collection<Map<String, Object>> getFields() {
+ return fields;
+ }
+
+ public Map<String, Object> getDocumentId() {
+ return documentId;
+ }
+
+ public Map<String, Object> getAnalyzerDiscriminator() {
+ return analyzerDiscriminator;
+ }
+
+
+ public Map<String, Object> getDateBridge() {
+ return dateBridge;
+ }
+ public Map<String, Object> getCalendarBridge() {
+ return calendarBridge;
+ }
+
+
+ public void setAnalyzerDiscriminator(Map<String, Object> analyzerDiscriminator) {
+ this.analyzerDiscriminator = analyzerDiscriminator;
+ }
+
+ public Map<String, Object> getIndexEmbedded() {
+ return indexEmbedded;
+ }
+
+ public void setIndexEmbedded(Map<String, Object> indexEmbedded) {
+ this.indexEmbedded = indexEmbedded;
+ }
+ public Map<String, Object> getContainedIn() {
+ return containedIn;
+ }
+
+ public void setContainedIn(Map<String, Object> containedIn) {
+ this.containedIn = containedIn;
+ }
+
+ public void setDynamicBoost(Map<String, Object> dynamicBoostAnn) {
+ this.dynamicBoost = dynamicBoostAnn;
+ }
+
+ public Map<String,Object> getDynamicBoost() {
+ return this.dynamicBoost;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyDescriptor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,100 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+import org.hibernate.search.analyzer.Discriminator;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class PropertyMapping {
+ private SearchMapping mapping;
+ private EntityDescriptor entity;
+ private PropertyDescriptor property;
+
+ public PropertyMapping(String name, ElementType type, EntityDescriptor entity, SearchMapping mapping) {
+ this.mapping = mapping;
+ this.entity = entity;
+ property = entity.getProperty(name, type);
+ }
+
+ public DocumentIdMapping documentId() {
+ return new DocumentIdMapping( property, entity, mapping );
+ }
+
+ public FieldMapping field() {
+ return new FieldMapping(property, entity, mapping);
+ }
+
+ public DateBridgeMapping dateBridge(Resolution resolution) {
+ return new DateBridgeMapping(mapping, entity, property, resolution);
+ }
+
+ public CalendarBridgeMapping calendarBridge(Resolution resolution) {
+ return new CalendarBridgeMapping(mapping, entity, property, resolution);
+ }
+
+ public PropertyMapping analyzerDiscriminator(Class<? extends Discriminator> discriminator) {
+ Map<String, Object> analyzerDiscriminatorAnn = new HashMap<String, Object>();
+ analyzerDiscriminatorAnn.put( "impl", discriminator );
+ property.setAnalyzerDiscriminator(analyzerDiscriminatorAnn);
+ return this;
+ }
+
+ public PropertyMapping dynamicBoost(Class<? extends BoostStrategy> impl) {
+ final Map<String, Object> dynamicBoostAnn = new HashMap<String, Object>();
+ dynamicBoostAnn.put("impl", impl);
+ property.setDynamicBoost(dynamicBoostAnn);
+ return this;
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public IndexEmbeddedMapping indexEmbedded() {
+ return new IndexEmbeddedMapping(mapping,property,entity);
+ }
+
+ public ContainedInMapping containedIn() {
+ return new ContainedInMapping(mapping, property, entity);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/PropertyMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
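
For illustration, a sketch of the property-level entry points above, combining a plain field with a date bridge. Meeting and its "agenda"/"start" properties are hypothetical.

import java.lang.annotation.ElementType;

import org.hibernate.search.annotations.Resolution;
import org.hibernate.search.cfg.SearchMapping;

public class PropertyMappingSketch {
    static class Meeting { }  // hypothetical entity with "agenda" and "start" properties

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Meeting.class )
                .indexed()
                .property( "agenda", ElementType.METHOD )
                        .field()
                .property( "start", ElementType.METHOD )
                        .field()
                        .dateBridge( Resolution.DAY );  // index the date down to day precision
        return mapping;
    }
}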
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ProvidedIdMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ProvidedIdMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ProvidedIdMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.lang.annotation.ElementType;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+public class ProvidedIdMapping {
+
+ private final SearchMapping searchMapping;
+ private final Map<String,Object> providedIdMapping;
+ private EntityDescriptor entity;
+
+ public ProvidedIdMapping(SearchMapping searchMapping, EntityDescriptor entity) {
+ this.searchMapping = searchMapping;
+ this.entity =entity;
+ providedIdMapping = new HashMap<String,Object>();
+ entity.setProvidedId(providedIdMapping);
+ }
+
+ public ProvidedIdMapping name(String name) {
+ this.providedIdMapping.put("name", name);
+ return this;
+ }
+
+ public FieldBridgeMapping bridge(Class<?> impl) {
+ return new FieldBridgeMapping( impl, providedIdMapping, null, null, entity, searchMapping );
+ }
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(searchMapping, name, impl);
+ }
+
+ public PropertyMapping property(String name, ElementType type) {
+ return new PropertyMapping(name, type, entity, searchMapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, searchMapping);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, searchMapping);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/ProvidedIdMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
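
For illustration, a sketch of declaring a provided identifier, typically used when the document id is supplied by the caller rather than read from a mapped property. The Address class and the id field name are hypothetical.

import org.hibernate.search.cfg.SearchMapping;

public class ProvidedIdSketch {
    static class Address { }  // hypothetical class indexed with an externally supplied id

    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Address.class )
                .indexed()
                        .providedId()
                                .name( "providedId" );  // field name under which the id is stored
        return mapping;
    }
}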
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfiguration.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfiguration.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfiguration.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.Iterator;
+import java.util.Properties;
+
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+
+/**
+ * Provides configuration to Hibernate Search
+ *
+ * @author Navin Surtani - navin@surtani.org
+ */
+public interface SearchConfiguration {
+ /**
+ * Returns an iterator over the list of indexed classes
+ *
+ * @return iterator of indexed classes.
+ */
+ Iterator<Class<?>> getClassMappings();
+
+ /**
+ * Returns a {@link java.lang.Class} from a String parameter.
+ * @param name the class name
+ * @return corresponding class instance.
+ */
+
+ Class<?> getClassMapping(String name);
+
+ /**
+ * Gets a configuration property by its name,
+ * or null if not present
+ *
+ * @param propertyName - as a String.
+ * @return the property as a String
+ */
+ String getProperty(String propertyName);
+
+ /**
+ * Gets properties as a java.util.Properties object.
+ *
+ * @return a java.util.Properties object.
+ * @see java.util.Properties object
+ */
+ Properties getProperties();
+
+ /**
+ * Returns a reflection manager if already available in the environment,
+ * null otherwise.
+ *
+ * @return ReflectionManager
+ */
+ ReflectionManager getReflectionManager();
+
+ /**
+ * Returns the programmatic configuration or null
+ * //TODO remove hard dependency on Solr classes
+ */
+ SearchMapping getProgrammaticMapping();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfiguration.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
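
For illustration, a minimal SearchConfiguration implementation backed by an explicit class list and a Properties object, e.g. for bootstrapping the engine outside of Hibernate Core. Class and property names are arbitrary; returning null from getReflectionManager() and getProgrammaticMapping() relies on the "null if not available" contract documented above.

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.cfg.SearchMapping;

public class ManualSearchConfiguration implements SearchConfiguration {
    private final List<Class<?>> classes;
    private final Properties properties;

    public ManualSearchConfiguration(Properties properties, Class<?>... classes) {
        this.properties = properties;
        this.classes = Arrays.asList( classes );
    }

    public Iterator<Class<?>> getClassMappings() {
        return classes.iterator();
    }

    public Class<?> getClassMapping(String name) {
        for ( Class<?> clazz : classes ) {
            if ( clazz.getName().equals( name ) ) return clazz;
        }
        return null;
    }

    public String getProperty(String propertyName) {
        return properties.getProperty( propertyName );
    }

    public Properties getProperties() {
        return properties;
    }

    public ReflectionManager getReflectionManager() {
        return null;  // let the engine create its own reflection manager
    }

    public SearchMapping getProgrammaticMapping() {
        return null;  // no programmatic mapping in this sketch
    }
}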
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,121 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Properties;
+
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.mapping.PersistentClass;
+
+/**
+ * Search configuration implementation wrapping a Hibernate Core configuration
+ *
+ * @author Emmanuel Bernard
+ */
+public class SearchConfigurationFromHibernateCore implements SearchConfiguration {
+ private final org.hibernate.cfg.Configuration cfg;
+ private ReflectionManager reflectionManager;
+
+ public SearchConfigurationFromHibernateCore(org.hibernate.cfg.Configuration cfg) {
+ if ( cfg == null ) throw new NullPointerException( "Configuration is null" );
+ this.cfg = cfg;
+ }
+
+ public Iterator<Class<?>> getClassMappings() {
+ return new ClassIterator( cfg.getClassMappings() );
+ }
+
+ public Class<?> getClassMapping(String name) {
+ return cfg.getClassMapping( name ).getMappedClass();
+ }
+
+ public String getProperty(String propertyName) {
+ return cfg.getProperty( propertyName );
+ }
+
+ public Properties getProperties() {
+ return cfg.getProperties();
+ }
+
+ public ReflectionManager getReflectionManager() {
+ if ( reflectionManager == null ) {
+ try {
+ //TODO introduce a ReflectionManagerHolder interface to avoid reflection
+ //I want to avoid hard link between HAN and Validator for such a simple need
+ //reuse the existing reflectionManager one when possible
+ reflectionManager =
+ (ReflectionManager) cfg.getClass().getMethod( "getReflectionManager" ).invoke( cfg );
+
+ }
+ catch (Exception e) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ }
+ return reflectionManager;
+ }
+
+ public SearchMapping getProgrammaticMapping() {
+ return null;
+ }
+
+ private static class ClassIterator implements Iterator<Class<?>> {
+ private Iterator hibernatePersistentClassIterator;
+ private Class<?> future;
+
+ private ClassIterator(Iterator hibernatePersistentClassIterator) {
+ this.hibernatePersistentClassIterator = hibernatePersistentClassIterator;
+ }
+
+ public boolean hasNext() {
+ //we need to read the next non null one. getMappedClass() can return null and should be ignored
+ if ( future != null) return true;
+ do {
+ if ( ! hibernatePersistentClassIterator.hasNext() ) {
+ future = null;
+ return false;
+ }
+ final PersistentClass pc = (PersistentClass) hibernatePersistentClassIterator.next();
+ future = pc.getMappedClass();
+ }
+ while ( future == null );
+ return true;
+ }
+
+ public Class<?> next() {
+ //run hasNext to init the next element
+ if ( ! hasNext() ) throw new NoSuchElementException();
+ Class<?> result = future;
+ future = null;
+ return result;
+ }
+
+ public void remove() {
+ throw new UnsupportedOperationException( "Cannot modify Hibernate Core metadata" );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchConfigurationFromHibernateCore.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
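
For illustration, how an existing Hibernate Core Configuration can be handed to the Search engine through the wrapper above.

import org.hibernate.cfg.Configuration;
import org.hibernate.search.cfg.SearchConfiguration;
import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;

public class ConfigurationWrapperSketch {
    public static SearchConfiguration wrap(Configuration cfg) {
        // The constructor above rejects a null Configuration with a NullPointerException.
        return new SearchConfigurationFromHibernateCore( cfg );
    }
}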
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,102 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.solr.analysis.TokenizerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SearchMapping {
+ private Set<Map<String, Object>> analyzerDefs = new HashSet<Map<String, Object>>();
+ private Set<Map<String, Object>> fullTextFilterDefs = new HashSet<Map<String, Object>>();
+ private Map<Class<?>, EntityDescriptor> entities = new HashMap<Class<?>, EntityDescriptor>();
+
+ public Set<Map<String, Object>> getAnalyzerDefs() {
+ return analyzerDefs;
+ }
+
+ public Set<Map<String, Object>> getFullTextFilerDefs() {
+ return fullTextFilterDefs;
+ }
+
+ public EntityDescriptor getEntityDescriptor(Class<?> entityType) {
+ return entities.get( entityType );
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, this);
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, this);
+ }
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(this, name, impl );
+ }
+
+ /**
+ * e.g. @Containing(things={@Thing(...), @Thing(...)})
+ * Map<String, Object> addedThing = addElementToAnnotationArray(containing, "things");
+ */
+
+ static Map<String, Object> addElementToAnnotationArray(Map<String, Object> containingAnnotation,
+ String attributeName) {
+ @SuppressWarnings("unchecked") List<Map<String, Object>> array = (List<Map<String, Object>>) containingAnnotation.get( attributeName );
+ if ( array == null) {
+ array = new ArrayList<Map<String, Object>>();
+ containingAnnotation.put( attributeName, array );
+ }
+ Map<String, Object> param = new HashMap<String, Object>();
+ array.add( param );
+ return param;
+ }
+
+ void addAnalyzerDef(Map<String, Object> analyzerDef) {
+ analyzerDefs.add( analyzerDef );
+ }
+
+ EntityDescriptor getEntity(Class<?> entityType) {
+ EntityDescriptor entity = entities.get( entityType );
+ if (entity == null) {
+ entity = new EntityDescriptor(entityType);
+ entities.put( entityType, entity );
+ }
+ return entity;
+ }
+
+ void addFulltextFilterDef(Map<String, Object> fullTextFilterDef) {
+ fullTextFilterDefs.add(fullTextFilterDef);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/SearchMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
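
For illustration, a sketch of handing a programmatic SearchMapping to a Hibernate Configuration. The Book entity is hypothetical, and the "hibernate.search.model_mapping" property key is an assumption about how the mapping object is picked up; that key is not defined in this diff.

import org.hibernate.cfg.Configuration;
import org.hibernate.search.cfg.SearchMapping;

public class ProgrammaticBootstrapSketch {
    static class Book { }  // hypothetical entity

    public static Configuration configure() {
        SearchMapping mapping = new SearchMapping();
        mapping.entity( Book.class ).indexed();

        Configuration cfg = new Configuration();
        // Assumed property key for registering the mapping object (see note above).
        cfg.getProperties().put( "hibernate.search.model_mapping", mapping );
        return cfg;
    }
}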
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/TokenFilterDefMapping.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/TokenFilterDefMapping.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/TokenFilterDefMapping.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,76 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.cfg;
+
+import java.util.Map;
+
+import org.apache.solr.analysis.TokenFilterFactory;
+import org.apache.solr.analysis.TokenizerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TokenFilterDefMapping {
+ private Map<String, Object> filter;
+ private Map<String, Object> analyzerDef;
+ private SearchMapping mapping;
+
+ TokenFilterDefMapping(Class<? extends TokenFilterFactory> factory, Map<String, Object> analyzerDef, SearchMapping mapping) {
+ this.mapping = mapping;
+ this.analyzerDef = analyzerDef;
+ this.filter = SearchMapping.addElementToAnnotationArray( analyzerDef, "filters" );
+ filter.put( "factory", factory );
+ }
+
+ /**
+ * @TokenFilterDef(... params={@Parameter(name="name", value="value"), ...})
+ */
+ public TokenFilterDefMapping param(String name, String value) {
+ Map<String, Object> param = SearchMapping.addElementToAnnotationArray(filter, "params");
+ param.put("name", name);
+ param.put("value", value);
+ return this;
+ }
+
+ /**
+ * @TokenFilterDef(factory=factory)
+ */
+ public TokenFilterDefMapping filter(Class<? extends TokenFilterFactory> factory) {
+ return new TokenFilterDefMapping(factory, analyzerDef, mapping );
+ }
+
+ public EntityMapping entity(Class<?> entityType) {
+ return new EntityMapping(entityType, mapping);
+ }
+
+ public AnalyzerDefMapping analyzerDef(String name, Class<? extends TokenizerFactory> tokenizerFactory) {
+ return new AnalyzerDefMapping(name, tokenizerFactory, mapping);
+ }
+
+ public FullTextFilterDefMapping fullTextFilterDef(String name, Class<?> impl) {
+ return new FullTextFilterDefMapping(mapping, name, impl );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/cfg/TokenFilterDefMapping.java
___________________________________________________________________
Name: svn:keywords
+ Id
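
For illustration, a sketch of a programmatic analyzer definition with a filter chain. It assumes AnalyzerDefMapping (not shown in this hunk) exposes a filter(Class<? extends TokenFilterFactory>) entry point, as the package-private TokenFilterDefMapping constructor suggests; the Solr factory classes are the ones commonly paired with Hibernate Search, and the definition name is arbitrary.

import org.apache.solr.analysis.LowerCaseFilterFactory;
import org.apache.solr.analysis.SnowballPorterFilterFactory;
import org.apache.solr.analysis.StandardTokenizerFactory;

import org.hibernate.search.cfg.SearchMapping;

public class AnalyzerDefSketch {
    public static SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        mapping.analyzerDef( "customAnalyzer", StandardTokenizerFactory.class )
                .filter( LowerCaseFilterFactory.class )       // assumed AnalyzerDefMapping.filter(...)
                .filter( SnowballPorterFilterFactory.class )  // TokenFilterDefMapping.filter(...) as defined above
                        .param( "language", "English" );      // filter parameter (name/value pair)
        return mapping;
    }
}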
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/BoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/BoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/BoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+/**
+ * Interface to implement boost values as functions
+ * of the object value being boosted.
+ * Implementations must be threadsafe.
+ *
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.annotations.Boost
+ */
+public interface BoostStrategy {
+ public float defineBoost(Object value);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/BoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
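
For illustration, a BoostStrategy implementation that boosts longer documents of a hypothetical Article class; such a class could be referenced from dynamicBoost(...) in the fluent API above or from the @DynamicBoost annotation.

import org.hibernate.search.engine.BoostStrategy;

public class ArticleLengthBoostStrategy implements BoostStrategy {

    // Must be thread-safe, as required by the interface contract above;
    // this implementation keeps no mutable state.
    public float defineBoost(Object value) {
        if ( value instanceof Article ) {
            Article article = (Article) value;
            return article.getPageCount() > 10 ? 1.5f : 1.0f;
        }
        return 1.0f;
    }

    // Hypothetical domain class used only for this sketch.
    public static class Article {
        private int pageCount;
        public int getPageCount() { return pageCount; }
        public void setPageCount(int pageCount) { this.pageCount = pageCount; }
    }
}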
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DefaultBoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DefaultBoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DefaultBoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,35 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class DefaultBoostStrategy implements BoostStrategy {
+
+ public float defineBoost(Object value) {
+ return 1.0f;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DefaultBoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,41 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import org.hibernate.search.ProjectionConstants;
+
+/**
+ * Interface created to keep backwards compatibility.
+ *
+ * @author Hardy Ferentschik
+ */
+public interface DocumentBuilder {
+
+ /**
+ * Lucene document field name containing the fully qualified classname of the indexed class.
+ *
+ */
+ String CLASS_FIELDNAME = ProjectionConstants.OBJECT_CLASS;
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,827 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.search.Similarity;
+import org.slf4j.Logger;
+
+import org.hibernate.Hibernate;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XAnnotatedElement;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.annotations.common.reflection.XProperty;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.analyzer.Discriminator;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.AnalyzerDiscriminator;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.ClassBridges;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.DynamicBoost;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.bridge.BridgeFactory;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.impl.InitContext;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.ReflectionHelper;
+import org.hibernate.search.util.ScopedAnalyzer;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Set up and provide a manager for classes which are indexed via <code>@IndexedEmbedded</code>, but themselves do not
+ * contain the <code>@Indexed</code> annotation.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Richard Hallier
+ * @author Hardy Ferentschik
+ */
+public class DocumentBuilderContainedEntity<T> implements DocumentBuilder {
+ private static final Logger log = LoggerFactory.make();
+
+ protected final PropertiesMetadata metadata = new PropertiesMetadata();
+ protected final XClass beanClass;
+ protected Set<Class<?>> mappedSubclasses = new HashSet<Class<?>>();
+ protected ReflectionManager reflectionManager; //available only during initialization and post-initialization
+ protected int level = 0;
+ protected int maxLevel = Integer.MAX_VALUE;
+ protected final ScopedAnalyzer analyzer = new ScopedAnalyzer();
+ protected Similarity similarity; //there is only 1 similarity per class hierarchy, and only 1 per index
+ protected boolean isRoot;
+ protected EntityState entityState;
+
+ /**
+ * Constructor used on contained entities not annotated with <code>@Indexed</code> themselves.
+ *
+ * @param clazz The class for which to build a <code>DocumentBuilderContainedEntity</code>.
+ * @param context Handle to default configuration settings.
+ * @param reflectionManager Reflection manager to use for processing the annotations.
+ */
+ public DocumentBuilderContainedEntity(XClass clazz, InitContext context, ReflectionManager reflectionManager) {
+
+ if ( clazz == null ) {
+ throw new AssertionFailure( "Unable to build a DocumentBuilderContainedEntity with a null class" );
+ }
+
+ this.entityState = EntityState.CONTAINED_IN_ONLY;
+ this.beanClass = clazz;
+ this.reflectionManager = reflectionManager;
+
+ init( clazz, context );
+
+ if ( metadata.containedInGetters.size() == 0 ) {
+ this.entityState = EntityState.NON_INDEXABLE;
+ }
+ }
+
+ protected void init(XClass clazz, InitContext context) {
+ metadata.boost = getBoost( clazz );
+ metadata.classBoostStrategy = getDynamicBoost( clazz );
+ metadata.analyzer = context.getDefaultAnalyzer();
+
+ Set<XClass> processedClasses = new HashSet<XClass>();
+ processedClasses.add( clazz );
+ initializeClass( clazz, metadata, true, "", processedClasses, context );
+
+ this.analyzer.setGlobalAnalyzer( metadata.analyzer );
+
+ // set the default similarity in case that after processing all classes there is still no similarity set
+ if ( this.similarity == null ) {
+ this.similarity = context.getDefaultSimilarity();
+ }
+ }
+
+ public boolean isRoot() {
+ return isRoot;
+ }
+
+ private void initializeClass(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix,
+ Set<XClass> processedClasses, InitContext context) {
+ List<XClass> hierarchy = new ArrayList<XClass>();
+ for ( XClass currentClass = clazz; currentClass != null; currentClass = currentClass.getSuperclass() ) {
+ hierarchy.add( currentClass );
+ }
+
+ /*
+ * Iterate the class hierarchy top down. This allows a class-level analyzer to override the default analyzer for the properties the class declares
+ */
+ for ( int index = hierarchy.size() - 1; index >= 0; index-- ) {
+ XClass currentClass = hierarchy.get( index );
+
+ initializeClassLevelAnnotations( currentClass, propertiesMetadata, isRoot, prefix, context );
+
+ // reject non-properties (i.e. regular methods): the object is loaded from Hibernate,
+ // so indexing a non-property does not make sense
+ List<XProperty> methods = currentClass.getDeclaredProperties( XClass.ACCESS_PROPERTY );
+ for ( XProperty method : methods ) {
+ initializeMemberLevelAnnotations(
+ method, propertiesMetadata, isRoot, prefix, processedClasses, context
+ );
+ }
+
+ List<XProperty> fields = currentClass.getDeclaredProperties( XClass.ACCESS_FIELD );
+ for ( XProperty field : fields ) {
+ initializeMemberLevelAnnotations(
+ field, propertiesMetadata, isRoot, prefix, processedClasses, context
+ );
+ }
+ }
+ }
+
+ /**
+ * Check and initialize class level annotations.
+ *
+ * @param clazz The class to process.
+ * @param propertiesMetadata The meta data holder.
+ * @param isRoot Flag indicating if the specified class is a root entity, meaning the start of a chain of indexed
+ * entities.
+ * @param prefix The current prefix used for the <code>Document</code> field names.
+ * @param context Handle to default configuration settings.
+ */
+ private void initializeClassLevelAnnotations(XClass clazz, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix, InitContext context) {
+
+ // check for a class level specified analyzer
+ Analyzer analyzer = getAnalyzer( clazz, context );
+ if ( analyzer != null ) {
+ propertiesMetadata.analyzer = analyzer;
+ }
+
+ // check for AnalyzerDefs annotations
+ checkForAnalyzerDefs( clazz, context );
+
+ // Check for any ClassBridges annotation.
+ ClassBridges classBridgesAnn = clazz.getAnnotation( ClassBridges.class );
+ if ( classBridgesAnn != null ) {
+ ClassBridge[] classBridges = classBridgesAnn.value();
+ for ( ClassBridge cb : classBridges ) {
+ bindClassBridgeAnnotation( prefix, propertiesMetadata, cb, context );
+ }
+ }
+
+ // Check for any ClassBridge style of annotations.
+ ClassBridge classBridgeAnn = clazz.getAnnotation( ClassBridge.class );
+ if ( classBridgeAnn != null ) {
+ bindClassBridgeAnnotation( prefix, propertiesMetadata, classBridgeAnn, context );
+ }
+
+ checkForAnalyzerDiscriminator( clazz, propertiesMetadata );
+
+ // Get similarity
+ if ( isRoot ) {
+ checkForSimilarity( clazz );
+ }
+ }
+
+ protected void initializeMemberLevelAnnotations(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot,
+ String prefix, Set<XClass> processedClasses, InitContext context) {
+ checkDocumentId( member, propertiesMetadata, isRoot, prefix, context );
+ checkForField( member, propertiesMetadata, prefix, context );
+ checkForFields( member, propertiesMetadata, prefix, context );
+ checkForAnalyzerDefs( member, context );
+ checkForAnalyzerDiscriminator( member, propertiesMetadata );
+ checkForIndexedEmbedded( member, propertiesMetadata, prefix, processedClasses, context );
+ checkForContainedIn( member, propertiesMetadata );
+ }
+
+ protected Analyzer getAnalyzer(XAnnotatedElement annotatedElement, InitContext context) {
+ org.hibernate.search.annotations.Analyzer analyzerAnn =
+ annotatedElement.getAnnotation( org.hibernate.search.annotations.Analyzer.class );
+ return getAnalyzer( analyzerAnn, context );
+ }
+
+ protected Analyzer getAnalyzer(org.hibernate.search.annotations.Analyzer analyzerAnn, InitContext context) {
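+ // @Analyzer either names an @AnalyzerDef (definition) or points to a Lucene Analyzer implementation
+ // class (impl); void.class signals that no implementation class was set on the annotation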
+ Class analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
+ if ( analyzerClass == void.class ) {
+ String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
+ if ( StringHelper.isEmpty( definition ) ) {
+ return null;
+ }
+ else {
+
+ return context.buildLazyAnalyzer( definition );
+ }
+ }
+ else {
+ try {
+ return ( Analyzer ) analyzerClass.newInstance();
+ }
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "Lucene analyzer does not implement " + Analyzer.class.getName() + ": " + analyzerClass.getName(),
+ e
+ );
+ }
+ catch ( Exception e ) {
+ throw new SearchException(
+ "Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e
+ );
+ }
+ }
+ }
+
+ private void checkForAnalyzerDefs(XAnnotatedElement annotatedElement, InitContext context) {
+ AnalyzerDefs defs = annotatedElement.getAnnotation( AnalyzerDefs.class );
+ if ( defs != null ) {
+ for ( AnalyzerDef def : defs.value() ) {
+ context.addAnalyzerDef( def );
+ }
+ }
+ AnalyzerDef def = annotatedElement.getAnnotation( AnalyzerDef.class );
+ context.addAnalyzerDef( def );
+ }
+
+ private void checkForAnalyzerDiscriminator(XAnnotatedElement annotatedElement, PropertiesMetadata propertiesMetadata) {
+ AnalyzerDiscriminator discriminatorAnn = annotatedElement.getAnnotation( AnalyzerDiscriminator.class );
+ if ( discriminatorAnn != null ) {
+ if ( propertiesMetadata.discriminator != null ) {
+ throw new SearchException(
+ "Multiple AnalyzerDiscriminator defined in the same class hierarchy: " + beanClass.getName()
+ );
+ }
+
+ Class<? extends Discriminator> discriminatorClass = discriminatorAnn.impl();
+ try {
+ propertiesMetadata.discriminator = discriminatorClass.newInstance();
+ }
+ catch ( Exception e ) {
+ throw new SearchException(
+ "Unable to instantiate analyzer discriminator implementation: " + discriminatorClass.getName()
+ );
+ }
+
+ if ( annotatedElement instanceof XMember ) {
+ propertiesMetadata.discriminatorGetter = ( XMember ) annotatedElement;
+ }
+ }
+ }
+
+ public Similarity getSimilarity() {
+ return similarity;
+ }
+
+ private void checkForFields(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, InitContext context) {
+ org.hibernate.search.annotations.Fields fieldsAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Fields.class );
+ if ( fieldsAnn != null ) {
+ for ( org.hibernate.search.annotations.Field fieldAnn : fieldsAnn.value() ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+ }
+
+ private void checkForSimilarity(XClass currClass) {
+ org.hibernate.search.annotations.Similarity similarityAnn = currClass.getAnnotation( org.hibernate.search.annotations.Similarity.class );
+ if ( similarityAnn != null ) {
+ if ( similarity != null ) {
+ throw new SearchException(
+ "Multiple Similarities defined in the same class hierarchy: " + beanClass.getName()
+ );
+ }
+ Class similarityClass = similarityAnn.impl();
+ try {
+ similarity = ( Similarity ) similarityClass.newInstance();
+ }
+ catch ( Exception e ) {
+ log.error(
+ "Exception attempting to instantiate Similarity '{}' set for {}",
+ similarityClass.getName(), beanClass.getName()
+ );
+ }
+ }
+ }
+
+ private void checkForField(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, InitContext context) {
+ org.hibernate.search.annotations.Field fieldAnn =
+ member.getAnnotation( org.hibernate.search.annotations.Field.class );
+ if ( fieldAnn != null ) {
+ bindFieldAnnotation( member, propertiesMetadata, prefix, fieldAnn, context );
+ }
+ }
+
+ private void checkForContainedIn(XProperty member, PropertiesMetadata propertiesMetadata) {
+ ContainedIn containedAnn = member.getAnnotation( ContainedIn.class );
+ if ( containedAnn != null ) {
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.containedInGetters.add( member );
+ }
+ }
+
+ private void checkForIndexedEmbedded(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, Set<XClass> processedClasses, InitContext context) {
+ IndexedEmbedded embeddedAnn = member.getAnnotation( IndexedEmbedded.class );
+ if ( embeddedAnn != null ) {
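+ // level/maxLevel guard the @IndexedEmbedded recursion depth: depth() added to the current level
+ // gives the new limit; a negative result indicates integer overflow and is treated as unlimited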
+ int oldMaxLevel = maxLevel;
+ int potentialLevel = embeddedAnn.depth() + level;
+ if ( potentialLevel < 0 ) {
+ potentialLevel = Integer.MAX_VALUE;
+ }
+ maxLevel = potentialLevel > maxLevel ? maxLevel : potentialLevel;
+ level++;
+
+ XClass elementClass;
+ if ( void.class == embeddedAnn.targetElement() ) {
+ elementClass = member.getElementClass();
+ }
+ else {
+ elementClass = reflectionManager.toXClass( embeddedAnn.targetElement() );
+ }
+ if ( maxLevel == Integer.MAX_VALUE //infinite
+ && processedClasses.contains( elementClass ) ) {
+ throw new SearchException(
+ "Circular reference. Duplicate use of "
+ + elementClass.getName()
+ + " in root entity " + beanClass.getName()
+ + "#" + buildEmbeddedPrefix( prefix, embeddedAnn, member )
+ );
+ }
+ if ( level <= maxLevel ) {
+ processedClasses.add( elementClass ); //push
+
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.embeddedGetters.add( member );
+ PropertiesMetadata metadata = new PropertiesMetadata();
+ propertiesMetadata.embeddedPropertiesMetadata.add( metadata );
+ metadata.boost = getBoost( member, null );
+ //property > entity analyzer
+ Analyzer analyzer = getAnalyzer( member, context );
+ metadata.analyzer = analyzer != null ? analyzer : propertiesMetadata.analyzer;
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ initializeClass( elementClass, metadata, false, localPrefix, processedClasses, context );
+ /**
+ * We will only index the "expected" type but that's OK, HQL cannot do down-casting either
+ */
+ if ( member.isArray() ) {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.ARRAY );
+ }
+ else if ( member.isCollection() ) {
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum subclasses etc etc??
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.MAP );
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.COLLECTION );
+ }
+ }
+ else {
+ propertiesMetadata.embeddedContainers.add( PropertiesMetadata.Container.OBJECT );
+ }
+
+ processedClasses.remove( elementClass ); //pop
+ }
+ else if ( log.isTraceEnabled() ) {
+ String localPrefix = buildEmbeddedPrefix( prefix, embeddedAnn, member );
+ log.trace( "depth reached, ignoring {}", localPrefix );
+ }
+
+ level--;
+ maxLevel = oldMaxLevel; //set back the old max level
+ }
+ }
+
+ protected void checkDocumentId(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix, InitContext context) {
+ Annotation documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ log.warn(
+ "@DocumentId specified on an entity which is not indexed by itself. Annotation gets ignored. Use @Field instead."
+ );
+ }
+ }
+
+ /**
+ * Determines the property name for the document id. It is either the name of the property itself or the
+ * value of the name attribute of the <code>idAnnotation</code>.
+ *
+ * @param member the property used as id property.
+ * @param idAnnotation the id annotation
+ *
+ * @return property name to be used as document id.
+ */
+ protected String getIdAttributeName(XProperty member, Annotation idAnnotation) {
+ String name = null;
+ try {
+ Method m = idAnnotation.getClass().getMethod( "name" );
+ name = ( String ) m.invoke( idAnnotation );
+ }
+ catch ( Exception e ) {
+ // ignore
+ }
+
+ return ReflectionHelper.getAttributeName( member, name );
+ }
+
+ private void bindClassBridgeAnnotation(String prefix, PropertiesMetadata propertiesMetadata, ClassBridge ann, InitContext context) {
+ String fieldName = prefix + ann.name();
+ propertiesMetadata.classNames.add( fieldName );
+ propertiesMetadata.classStores.add( getStore( ann.store() ) );
+ propertiesMetadata.classIndexes.add( getIndex( ann.index() ) );
+ propertiesMetadata.classTermVectors.add( getTermVector( ann.termVector() ) );
+ propertiesMetadata.classBridges.add( BridgeFactory.extractType( ann ) );
+ propertiesMetadata.classBoosts.add( ann.boost().value() );
+
+ Analyzer analyzer = getAnalyzer( ann.analyzer(), context );
+ if ( analyzer == null ) {
+ analyzer = propertiesMetadata.analyzer;
+ }
+ if ( analyzer == null ) {
+ throw new AssertionFailure( "Analyzer should not be undefined" );
+ }
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+
+ private void bindFieldAnnotation(XProperty member, PropertiesMetadata propertiesMetadata, String prefix, org.hibernate.search.annotations.Field fieldAnn, InitContext context) {
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + ReflectionHelper.getAttributeName( member, fieldAnn.name() );
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( fieldAnn.store() ) );
+ propertiesMetadata.fieldIndex.add( getIndex( fieldAnn.index() ) );
+ propertiesMetadata.fieldBoosts.add( getBoost( member, fieldAnn ) );
+ propertiesMetadata.dynamicFieldBoosts.add( getDynamicBoost( member ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( fieldAnn.termVector() ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( fieldAnn, member, reflectionManager ) );
+
+ // Field > property > entity analyzer
+ Analyzer analyzer = getAnalyzer( fieldAnn.analyzer(), context );
+ if ( analyzer == null ) {
+ analyzer = getAnalyzer( member, context );
+ }
+ if ( analyzer != null ) {
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+
+ protected Float getBoost(XProperty member, org.hibernate.search.annotations.Field fieldAnn) {
+ float computedBoost = 1.0f;
+ Boost boostAnn = member.getAnnotation( Boost.class );
+ if ( boostAnn != null ) {
+ computedBoost = boostAnn.value();
+ }
+ if ( fieldAnn != null ) {
+ computedBoost *= fieldAnn.boost().value();
+ }
+ return computedBoost;
+ }
+
+ protected BoostStrategy getDynamicBoost(XProperty member) {
+ DynamicBoost boostAnnotation = member.getAnnotation( DynamicBoost.class );
+ if ( boostAnnotation == null ) {
+ return new DefaultBoostStrategy();
+ }
+
+ Class<? extends BoostStrategy> boostStrategyClass = boostAnnotation.impl();
+ BoostStrategy strategy;
+ try {
+ strategy = boostStrategyClass.newInstance();
+ }
+ catch ( Exception e ) {
+ throw new SearchException(
+ "Unable to instantiate boost strategy implementation: " + boostStrategyClass.getName()
+ );
+ }
+ return strategy;
+ }
+
+ private String buildEmbeddedPrefix(String prefix, IndexedEmbedded embeddedAnn, XProperty member) {
+ String localPrefix = prefix;
+ if ( ".".equals( embeddedAnn.prefix() ) ) {
+ //default to property name
+ localPrefix += member.getName() + '.';
+ }
+ else {
+ localPrefix += embeddedAnn.prefix();
+ }
+ return localPrefix;
+ }
+
+ protected Field.Store getStore(Store store) {
+ switch ( store ) {
+ case NO:
+ return Field.Store.NO;
+ case YES:
+ return Field.Store.YES;
+ case COMPRESS:
+ return Field.Store.COMPRESS;
+ default:
+ throw new AssertionFailure( "Unexpected Store: " + store );
+ }
+ }
+
+ protected Field.TermVector getTermVector(TermVector vector) {
+ switch ( vector ) {
+ case NO:
+ return Field.TermVector.NO;
+ case YES:
+ return Field.TermVector.YES;
+ case WITH_OFFSETS:
+ return Field.TermVector.WITH_OFFSETS;
+ case WITH_POSITIONS:
+ return Field.TermVector.WITH_POSITIONS;
+ case WITH_POSITION_OFFSETS:
+ return Field.TermVector.WITH_POSITIONS_OFFSETS;
+ default:
+ throw new AssertionFailure( "Unexpected TermVector: " + vector );
+ }
+ }
+
+ protected Field.Index getIndex(Index index) {
+ switch ( index ) {
+ case NO:
+ return Field.Index.NO;
+ case NO_NORMS:
+ return Field.Index.NOT_ANALYZED_NO_NORMS;
+ case TOKENIZED:
+ return Field.Index.ANALYZED;
+ case UN_TOKENIZED:
+ return Field.Index.NOT_ANALYZED;
+ default:
+ throw new AssertionFailure( "Unexpected Index: " + index );
+ }
+ }
+
+ protected float getBoost(XClass element) {
+ float boost = 1.0f;
+ if ( element == null ) {
+ return boost;
+ }
+ Boost boostAnnotation = element.getAnnotation( Boost.class );
+ if ( boostAnnotation != null ) {
+ boost = boostAnnotation.value();
+ }
+ return boost;
+ }
+
+ protected BoostStrategy getDynamicBoost(XClass element) {
+ if ( element == null ) {
+ return null;
+ }
+ DynamicBoost boostAnnotation = element.getAnnotation( DynamicBoost.class );
+ if ( boostAnnotation == null ) {
+ return new DefaultBoostStrategy();
+ }
+
+ Class<? extends BoostStrategy> boostStrategyClass = boostAnnotation.impl();
+ BoostStrategy strategy;
+ try {
+ strategy = boostStrategyClass.newInstance();
+ }
+ catch ( Exception e ) {
+ throw new SearchException(
+ "Unable to instantiate boost strategy implementation: " + boostStrategyClass.getName()
+ );
+ }
+ return strategy;
+ }
+
+ public void addWorkToQueue(Class<T> entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
+ /**
+ * When references are changed, either null or another one, we expect dirty checking to be triggered (both sides
+ * have to be updated)
+ * When the internal object is changed, we apply the {Add|Update}Work on containedIns
+ */
+ if ( workType.searchForContainers() ) {
+ processContainedInInstances( entity, queue, metadata, searchFactoryImplementor );
+ }
+ }
+
+ /**
+ * If we have a work instance we have to check whether the instance to be indexed is contained in any other indexed entities.
+ *
+ * @param instance The instance to be indexed
+ * @param queue the current work queue
+ * @param metadata metadata
+ * @param searchFactoryImplementor the search factory
+ */
+ private <T> void processContainedInInstances(Object instance, List<LuceneWork> queue, PropertiesMetadata metadata, SearchFactoryImplementor searchFactoryImplementor) {
+ for ( int i = 0; i < metadata.containedInGetters.size(); i++ ) {
+ XMember member = metadata.containedInGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( instance, member );
+
+ if ( value == null ) {
+ continue;
+ }
+
+ if ( member.isArray() ) {
+ @SuppressWarnings("unchecked")
+ T[] array = ( T[] ) value;
+ for ( T arrayValue : array ) {
+ processSingleContainedInInstance( queue, searchFactoryImplementor, arrayValue );
+ }
+ }
+ else if ( member.isCollection() ) {
+ Collection<T> collection = getActualCollection( member, value );
+ for ( T collectionValue : collection ) {
+ processSingleContainedInInstance( queue, searchFactoryImplementor, collectionValue );
+ }
+ }
+ else {
+ processSingleContainedInInstance( queue, searchFactoryImplementor, value );
+ }
+ }
+ }
+
+ /**
+ * An {@code XMember} instance treats a map as a collection as well, in which case the map values are returned as
+ * a collection.
+ *
+ * @param member The member instance
+ * @param value The value
+ *
+ * @return The {@code value} cast to a collection, or, in case {@code value} is a map, the map values as a collection.
+ */
+ private <T> Collection<T> getActualCollection(XMember member, Object value) {
+ Collection<T> collection;
+ if ( Map.class.equals( member.getCollectionClass() ) ) {
+ //hum
+ @SuppressWarnings("unchecked")
+ Collection<T> tmpCollection = ( ( Map<?, T> ) value ).values();
+ collection = tmpCollection;
+ }
+ else {
+ @SuppressWarnings("unchecked")
+ Collection<T> tmpCollection = ( Collection<T> ) value;
+ collection = tmpCollection;
+ }
+ return collection;
+ }
+
+ private <T> void processSingleContainedInInstance(List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor, T value) {
+ @SuppressWarnings("unchecked")
+ Class<T> valueClass = Hibernate.getClass( value );
+ DocumentBuilderIndexedEntity<T> builderIndexedEntity =
+ searchFactoryImplementor.getDocumentBuilderIndexedEntity( valueClass );
+
+ // it could be we have a nested @IndexedEmbedded chain in which case we have to find the top level @Indexed entities
+ if ( builderIndexedEntity == null ) {
+ DocumentBuilderContainedEntity<T> builderContainedEntity =
+ searchFactoryImplementor.getDocumentBuilderContainedEntity( valueClass );
+ if ( builderContainedEntity != null ) {
+ processContainedInInstances( value, queue, builderContainedEntity.metadata, searchFactoryImplementor );
+ }
+ }
+ else {
+ addWorkForEmbeddedValue( value, queue, valueClass, builderIndexedEntity, searchFactoryImplementor );
+ }
+ }
+
+ /**
+ * Create a {@code LuceneWork} instance of the entity which needs updating due to the embedded instance change.
+ *
+ * @param value The value to index
+ * @param queue The current (Lucene) work queue
+ * @param valueClass The class of the value
+ * @param builderIndexedEntity the document builder for the entity which needs updating due to a update event of the embedded instance
+ * @param searchFactoryImplementor the search factory.
+ */
+ private <T> void addWorkForEmbeddedValue(T value, List<LuceneWork> queue, Class<T> valueClass,
+ DocumentBuilderIndexedEntity<T> builderIndexedEntity, SearchFactoryImplementor searchFactoryImplementor) {
+ Serializable id = ( Serializable ) ReflectionHelper.getMemberValue( value, builderIndexedEntity.idGetter );
+ builderIndexedEntity.addWorkToQueue( valueClass, value, id, WorkType.UPDATE, queue, searchFactoryImplementor );
+ }
+
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
+
+ public void postInitialize(Set<Class<?>> indexedClasses) {
+ if ( entityState == EntityState.NON_INDEXABLE ) {
+ throw new AssertionFailure( "A non indexed entity is post processed" );
+ }
+ //this method does not require synchronization
+ Class plainClass = reflectionManager.toClass( beanClass );
+ Set<Class<?>> tempMappedSubclasses = new HashSet<Class<?>>();
+ //together with the caller this is O(n^2), but I think it's still faster than creating the up hierarchy for each class
+ for ( Class currentClass : indexedClasses ) {
+ if ( plainClass != currentClass && plainClass.isAssignableFrom( currentClass ) ) {
+ tempMappedSubclasses.add( currentClass );
+ }
+ }
+ this.mappedSubclasses = Collections.unmodifiableSet( tempMappedSubclasses );
+ Class superClass = plainClass.getSuperclass();
+ this.isRoot = true;
+ while ( superClass != null ) {
+ if ( indexedClasses.contains( superClass ) ) {
+ this.isRoot = false;
+ break;
+ }
+ superClass = superClass.getSuperclass();
+ }
+ this.reflectionManager = null;
+ }
+
+ public EntityState getEntityState() {
+ return entityState;
+ }
+
+ public Set<Class<?>> getMappedSubclasses() {
+ return mappedSubclasses;
+ }
+
+ /**
+ * Wrapper class containing all the meta data extracted out of a single entity.
+ * All field/property related properties are kept in lists. Retrieving all metadata for a given
+ * property/field means accessing all the lists with the same index.
+ */
+ protected static class PropertiesMetadata {
+ public float boost;
+ public Analyzer analyzer;
+ public Discriminator discriminator;
+ public XMember discriminatorGetter;
+ public BoostStrategy classBoostStrategy;
+
+ public final List<String> fieldNames = new ArrayList<String>();
+ public final List<XMember> fieldGetters = new ArrayList<XMember>();
+ public final List<FieldBridge> fieldBridges = new ArrayList<FieldBridge>();
+ public final List<Field.Store> fieldStore = new ArrayList<Field.Store>();
+ public final List<Field.Index> fieldIndex = new ArrayList<Field.Index>();
+ public final List<Float> fieldBoosts = new ArrayList<Float>();
+ public final List<BoostStrategy> dynamicFieldBoosts = new ArrayList<BoostStrategy>();
+
+ public final List<Field.TermVector> fieldTermVectors = new ArrayList<Field.TermVector>();
+ public final List<XMember> embeddedGetters = new ArrayList<XMember>();
+ public final List<PropertiesMetadata> embeddedPropertiesMetadata = new ArrayList<PropertiesMetadata>();
+ public final List<Container> embeddedContainers = new ArrayList<Container>();
+ public final List<XMember> containedInGetters = new ArrayList<XMember>();
+
+ public final List<String> classNames = new ArrayList<String>();
+ public final List<Field.Store> classStores = new ArrayList<Field.Store>();
+ public final List<Field.Index> classIndexes = new ArrayList<Field.Index>();
+ public final List<FieldBridge> classBridges = new ArrayList<FieldBridge>();
+ public final List<Field.TermVector> classTermVectors = new ArrayList<Field.TermVector>();
+ public final List<Float> classBoosts = new ArrayList<Float>();
+
+ public enum Container {
+ OBJECT,
+ COLLECTION,
+ MAP,
+ ARRAY
+ }
+
+ protected LuceneOptions getClassLuceneOptions(int i) {
+ return new LuceneOptionsImpl(
+ classStores.get( i ),
+ classIndexes.get( i ), classTermVectors.get( i ), classBoosts.get( i )
+ );
+ }
+
+ protected LuceneOptions getFieldLuceneOptions(int i, Object value) {
+ LuceneOptions options;
+ options = new LuceneOptionsImpl(
+ fieldStore.get( i ),
+ fieldIndex.get( i ),
+ fieldTermVectors.get( i ),
+ fieldBoosts.get( i ) * dynamicFieldBoosts.get( i ).defineBoost( value )
+ );
+ return options;
+ }
+
+ protected float getClassBoost(Object value) {
+ return boost * classBoostStrategy.defineBoost( value );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderContainedEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
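
A minimal usage sketch for the contained-entity builder above, assuming an already built InitContext,
ReflectionManager and SearchFactoryImplementor; the Address entity and the address, addressId,
initContext, reflectionManager and searchFactory names are placeholders for illustration:

XClass xClazz = reflectionManager.toXClass( Address.class );
DocumentBuilderContainedEntity<Address> builder =
new DocumentBuilderContainedEntity<Address>( xClazz, initContext, reflectionManager );
List<LuceneWork> queue = new ArrayList<LuceneWork>();
// when the work type searches for containers, this walks the @ContainedIn getters and enqueues
// UPDATE work for the owning @Indexed entities
builder.addWorkToQueue( Address.class, address, addressId, WorkType.UPDATE, queue, searchFactory );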
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,747 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.lang.annotation.Annotation;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Term;
+import org.slf4j.Logger;
+
+import org.hibernate.Hibernate;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.annotations.common.util.ReflectHelper;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.annotations.common.reflection.XProperty;
+import org.hibernate.proxy.HibernateProxy;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.analyzer.Discriminator;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.ProvidedId;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.PurgeAllLuceneWork;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.bridge.BridgeFactory;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.TwoWayFieldBridge;
+import org.hibernate.search.bridge.TwoWayString2FieldBridgeAdaptor;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+import org.hibernate.search.impl.InitContext;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.ReflectionHelper;
+
+/**
+ * Set up and provide a manager for classes which are directly annotated with <code>@Indexed</code>.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Richard Hallier
+ * @author Hardy Ferentschik
+ */
+public class DocumentBuilderIndexedEntity<T> extends DocumentBuilderContainedEntity<T> {
+ private static final Logger log = LoggerFactory.make();
+
+ /**
+ * Arrays of directory providers for the underlying Lucene indexes of the indexed entity.
+ */
+ private final DirectoryProvider[] directoryProviders;
+
+ /**
+ * The sharding strategy used for the indexed entity.
+ */
+ private final IndexShardingStrategy shardingStrategy;
+
+ /**
+ * Flag indicating whether <code>@DocumentId</code> was explicitly specified.
+ */
+ private boolean explicitDocumentId = false;
+
+ /**
+ * Flag indicating whether {@link org.apache.lucene.search.Searcher#doc(int, org.apache.lucene.document.FieldSelector)}
+ * can be used in order to retrieve documents. This is only safe to do if we know that
+ * all involved bridges are implementing <code>TwoWayStringBridge</code>. See HSEARCH-213.
+ */
+ private boolean allowFieldSelectionInProjection = false;
+
+ /**
+ * The class member used as document id.
+ */
+ protected XMember idGetter;
+
+ /**
+ * Name of the document id field.
+ */
+ protected String idKeywordName;
+
+ /**
+ * Boost specified on the document id.
+ */
+ private Float idBoost;
+
+ /**
+ * The bridge used for the document id.
+ */
+ private TwoWayFieldBridge idBridge;
+
+ /**
+ * Flag indicating whether there is an explicit id (@DocumentId or @Id) or not. When Search is used standalone,
+ * for example via JBoss Cache Searchable, the <code>idKeywordName</code> will be provided externally.
+ */
+ private boolean idProvided = false;
+
+
+ //if composite id, use of (a, b) in ((1,2), (3,4)) fails on most databases
+ private boolean safeFromTupleId;
+
+ /**
+ * Creates a document builder for entities annotated with <code>@Indexed</code>.
+ *
+ * @param clazz The class for which to build a <code>DocumentBuilderIndexedEntity</code>.
+ * @param context Handle to default configuration settings.
+ * @param directoryProviders Arrays of directory providers for the underlying Lucene indexes of the indexed entity.
+ * @param shardingStrategy The sharding strategy used for the indexed entity.
+ * @param reflectionManager Reflection manager to use for processing the annotations.
+ */
+ public DocumentBuilderIndexedEntity(XClass clazz, InitContext context, DirectoryProvider[] directoryProviders,
+ IndexShardingStrategy shardingStrategy, ReflectionManager reflectionManager) {
+
+ super( clazz, context, reflectionManager );
+
+ this.entityState = EntityState.INDEXED;
+ this.directoryProviders = directoryProviders;
+ this.shardingStrategy = shardingStrategy;
+ }
+
+ protected void init(XClass clazz, InitContext context) {
+ super.init( clazz, context );
+
+ // special case @ProvidedId
+ ProvidedId provided = findProvidedId( clazz, reflectionManager );
+ if ( provided != null ) {
+ idBridge = BridgeFactory.extractTwoWayType( provided.bridge() );
+ idKeywordName = provided.name();
+ }
+
+ if ( idKeywordName == null ) {
+ throw new SearchException( "No document id in: " + clazz.getName() );
+ }
+
+ //if composite id, use of (a, b) in ((1,2),(3,4)) fails on most databases
+ //a TwoWayString2FieldBridgeAdaptor is never a composite id
+ safeFromTupleId = TwoWayString2FieldBridgeAdaptor.class.isAssignableFrom( idBridge.getClass() );
+
+ checkAllowFieldSelection();
+ if ( log.isDebugEnabled() ) {
+ log.debug(
+ "Field selection in projections is set to {} for entity {}.",
+ allowFieldSelectionInProjection,
+ clazz.getName()
+ );
+ }
+ }
+
+ protected void checkDocumentId(XProperty member, PropertiesMetadata propertiesMetadata, boolean isRoot, String prefix, InitContext context) {
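+ // for the root entity the property becomes the document id (idKeywordName, idBridge, idGetter);
+ // for an embedded component the id is indexed as a regular stored, untokenized field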
+ Annotation idAnnotation = getIdAnnotation( member, context );
+ if ( idAnnotation != null ) {
+ String attributeName = getIdAttributeName( member, idAnnotation );
+ if ( isRoot ) {
+ if ( idKeywordName != null && explicitDocumentId ) {
+ throw new AssertionFailure(
+ "Two document id assigned: "
+ + idKeywordName + " and " + attributeName
+ );
+ }
+ idKeywordName = prefix + attributeName;
+ FieldBridge fieldBridge = BridgeFactory.guessType( null, member, reflectionManager );
+ if ( fieldBridge instanceof TwoWayFieldBridge ) {
+ idBridge = ( TwoWayFieldBridge ) fieldBridge;
+ }
+ else {
+ throw new SearchException(
+ "Bridge for document id does not implement TwoWayFieldBridge: " + member.getName()
+ );
+ }
+ idBoost = getBoost( member, null );
+ ReflectionHelper.setAccessible( member );
+ idGetter = member;
+ }
+ else {
+ //component should index their document id
+ ReflectionHelper.setAccessible( member );
+ propertiesMetadata.fieldGetters.add( member );
+ String fieldName = prefix + attributeName;
+ propertiesMetadata.fieldNames.add( fieldName );
+ propertiesMetadata.fieldStore.add( getStore( Store.YES ) );
+ propertiesMetadata.fieldIndex.add( getIndex( Index.UN_TOKENIZED ) );
+ propertiesMetadata.fieldTermVectors.add( getTermVector( TermVector.NO ) );
+ propertiesMetadata.fieldBridges.add( BridgeFactory.guessType( null, member, reflectionManager ) );
+ propertiesMetadata.fieldBoosts.add( getBoost( member, null ) );
+ propertiesMetadata.dynamicFieldBoosts.add( getDynamicBoost( member ) );
+ // property > entity analyzer (no field analyzer)
+ Analyzer analyzer = getAnalyzer( member, context );
+ if ( analyzer == null ) {
+ analyzer = propertiesMetadata.analyzer;
+ }
+ if ( analyzer == null ) {
+ throw new AssertionFailure( "Analizer should not be undefined" );
+ }
+ this.analyzer.addScopedAnalyzer( fieldName, analyzer );
+ }
+ }
+ }
+
+ /**
+ * Checks whether the specified property contains an annotation used as document id.
+ * This can either be an explicit <code>@DocumentId</code> or if no <code>@DocumentId</code> is specified a
+ * JPA <code>@Id</code> annotation. The check for the JPA annotation is done indirectly to avoid a hard dependency
+ * on Hibernate Annotations.
+ *
+ * @param member the property to check for the id annotation.
+ * @param context Handle to default configuration settings.
+ *
+ * @return the annotation used as document id or <code>null</code> if no id annotation is specified on the property.
+ */
+ private Annotation getIdAnnotation(XProperty member, InitContext context) {
+ Annotation idAnnotation = null;
+
+ // check for explicit DocumentId
+ DocumentId documentIdAnn = member.getAnnotation( DocumentId.class );
+ if ( documentIdAnn != null ) {
+ explicitDocumentId = true;
+ idAnnotation = documentIdAnn;
+ }
+ // check for JPA @Id
+ else if ( !explicitDocumentId && context.isJpaPresent() ) {
+ Annotation jpaId;
+ try {
+ @SuppressWarnings("unchecked")
+ Class<? extends Annotation> jpaIdClass =
+ org.hibernate.annotations.common.util.ReflectHelper
+ .classForName( "javax.persistence.Id", InitContext.class );
+ jpaId = member.getAnnotation( jpaIdClass );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException( "Unable to load @Id.class even though it should be present ?!" );
+ }
+ if ( jpaId != null ) {
+ log.debug( "Found JPA id and using it as document id" );
+ idAnnotation = jpaId;
+ }
+ }
+ return idAnnotation;
+ }
+
+ private ProvidedId findProvidedId(XClass clazz, ReflectionManager reflectionManager) {
+ ProvidedId id = null;
+ XClass currentClass = clazz;
+ while ( id == null && ( !reflectionManager.equals( currentClass, Object.class ) ) ) {
+ id = currentClass.getAnnotation( ProvidedId.class );
+ currentClass = currentClass.getSuperclass();
+ }
+ return id;
+ }
+
+ //TODO could we use T instead of EntityClass?
+ public void addWorkToQueue(Class<T> entityClass, T entity, Serializable id, WorkType workType, List<LuceneWork> queue, SearchFactoryImplementor searchFactoryImplementor) {
+ //TODO with the caller loop we are in an n^2: optimize it using a HashMap for work recognition
+
+ boolean sameIdWasSetToBeDeleted = false;
+ List<LuceneWork> toDelete = new ArrayList<LuceneWork>();
+ boolean duplicateDelete = false;
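+ // scan the work already queued for the same entity type and id: DELETE takes precedence over
+ // earlier ADD work (HSEARCH-257), duplicate deletes are dropped, and a pending delete
+ // suppresses collection-triggered updates below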
+ for ( LuceneWork luceneWork : queue ) {
+ if ( luceneWork.getEntityClass() == entityClass ) {
+ Serializable currentId = luceneWork.getId();
+ if ( currentId != null && currentId.equals( id ) ) { //find a way to use Type.equals(x,y)
+ if ( luceneWork instanceof DeleteLuceneWork ) {
+ //flag this work as related to a to-be-deleted entity
+ sameIdWasSetToBeDeleted = true;
+ }
+ else if ( luceneWork instanceof AddLuceneWork ) {
+ //if a later work in the queue is adding it back, undo deletion flag:
+ sameIdWasSetToBeDeleted = false;
+ }
+ if ( workType == WorkType.DELETE ) { //TODO add PURGE?
+ //DELETE should have precedence over any update before (HSEARCH-257)
+ //if an Add work is here, remove it
+ //if another delete is here, remember it but still search for Add work
+ if ( luceneWork instanceof AddLuceneWork ) {
+ toDelete.add( luceneWork );
+ }
+ else if ( luceneWork instanceof DeleteLuceneWork ) {
+ duplicateDelete = true;
+ }
+ }
+ if ( workType == WorkType.ADD ) {
+ if ( luceneWork instanceof AddLuceneWork ) {
+ //embedded objects may issue an "UPDATE" right before the "ADD",
+ //leading to double insertions in the index
+ toDelete.add( luceneWork );
+ }
+ }
+ //TODO do something to avoid multiple PURGE ALL and OPTIMIZE
+ }
+ }
+ }
+
+ if ( sameIdWasSetToBeDeleted && workType == WorkType.COLLECTION ) {
+ //avoid updating (and thus adding) objects which are going to be deleted
+ return;
+ }
+
+ for ( LuceneWork luceneWork : toDelete ) {
+ queue.remove( luceneWork );
+ }
+ if ( duplicateDelete ) {
+ return;
+ }
+
+ if ( workType == WorkType.ADD ) {
+ String idInString = idBridge.objectToString( id );
+ queue.add( createAddWork( entityClass, entity, id, idInString, false ) );
+ }
+ else if ( workType == WorkType.DELETE || workType == WorkType.PURGE ) {
+ String idInString = idBridge.objectToString( id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ }
+ else if ( workType == WorkType.PURGE_ALL ) {
+ queue.add( new PurgeAllLuceneWork( entityClass ) );
+ }
+ else if ( workType == WorkType.UPDATE || workType == WorkType.COLLECTION ) {
+ String idInString = idBridge.objectToString( id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( createAddWork( entityClass, entity, id, idInString, false ) );
+ }
+ else if ( workType == WorkType.INDEX ) {
+ String idInString = idBridge.objectToString( id );
+ queue.add( new DeleteLuceneWork( id, idInString, entityClass ) );
+ queue.add( createAddWork( entityClass, entity, id, idInString, true ) );
+ }
+ else {
+ throw new AssertionFailure( "Unknown WorkType: " + workType );
+ }
+
+ super.addWorkToQueue( entityClass, entity, id, workType, queue, searchFactoryImplementor );
+ }
+
+ public AddLuceneWork createAddWork(Class<T> entityClass, T entity, Serializable id, String idInString, boolean isBatch) {
+ Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
+ Document doc = getDocument( entity, id, fieldToAnalyzerMap );
+ AddLuceneWork addWork;
+ if ( fieldToAnalyzerMap.isEmpty() ) {
+ addWork = new AddLuceneWork( id, idInString, entityClass, doc, isBatch );
+ }
+ else {
+ addWork = new AddLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap, isBatch );
+ }
+ return addWork;
+ }
+
+ /**
+ * Builds the Lucene <code>Document</code> for a given entity <code>instance</code> and its <code>id</code>.
+ *
+ * @param instance The entity for which to build the matching Lucene <code>Document</code>
+ * @param id the entity id.
+ * @param fieldToAnalyzerMap this map gets populated while generating the <code>Document</code>.
+ * It allows a named analyzer to be specified for any document field. This parameter cannot be <code>null</code>.
+ *
+ * @return The Lucene <code>Document</code> for the specified entity.
+ */
+ public Document getDocument(T instance, Serializable id, Map<String, String> fieldToAnalyzerMap) {
+ if ( fieldToAnalyzerMap == null ) {
+ throw new IllegalArgumentException( "fieldToAnalyzerMap cannot be null" );
+ }
+
+ Document doc = new Document();
+ final Class<?> entityType = Hibernate.getClass( instance );
+ doc.setBoost( metadata.getClassBoost( instance ) );
+
+ // add the class name of the entity to the document
+ Field classField =
+ new Field(
+ CLASS_FIELDNAME,
+ entityType.getName(),
+ Field.Store.YES,
+ Field.Index.NOT_ANALYZED,
+ Field.TermVector.NO
+ );
+ doc.add( classField );
+
+ // now add the entity id to the document
+ LuceneOptions luceneOptions = new LuceneOptionsImpl(
+ Field.Store.YES,
+ Field.Index.NOT_ANALYZED, Field.TermVector.NO, idBoost
+ );
+ idBridge.set( idKeywordName, id, doc, luceneOptions );
+
+ // finally add all other document fields
+ Set<String> processedFieldNames = new HashSet<String>();
+ buildDocumentFields( instance, doc, metadata, fieldToAnalyzerMap, processedFieldNames );
+ return doc;
+ }
+
+ private void buildDocumentFields(Object instance, Document doc, PropertiesMetadata propertiesMetadata, Map<String, String> fieldToAnalyzerMap,
+ Set<String> processedFieldNames) {
+ if ( instance == null ) {
+ return;
+ }
+
+ // needed for field access: field access cannot work on the proxied version
+ Object unproxiedInstance = unproxy( instance );
+
+ // process the class bridges
+ for ( int i = 0; i < propertiesMetadata.classBridges.size(); i++ ) {
+ FieldBridge fb = propertiesMetadata.classBridges.get( i );
+ fb.set(
+ propertiesMetadata.classNames.get( i ), unproxiedInstance,
+ doc, propertiesMetadata.getClassLuceneOptions( i )
+ );
+ }
+
+ // process the indexed fields
+ for ( int i = 0; i < propertiesMetadata.fieldNames.size(); i++ ) {
+ XMember member = propertiesMetadata.fieldGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( unproxiedInstance, member );
+ propertiesMetadata.fieldBridges.get( i ).set(
+ propertiesMetadata.fieldNames.get( i ), value, doc,
+ propertiesMetadata.getFieldLuceneOptions( i, value )
+ );
+ }
+
+ // allow analyzer override for the fields added by the class and field bridges
+ allowAnalyzerDiscriminatorOverride(
+ doc, propertiesMetadata, fieldToAnalyzerMap, processedFieldNames, unproxiedInstance
+ );
+
+ // recursively process embedded objects
+ for ( int i = 0; i < propertiesMetadata.embeddedGetters.size(); i++ ) {
+ XMember member = propertiesMetadata.embeddedGetters.get( i );
+ Object value = ReflectionHelper.getMemberValue( unproxiedInstance, member );
+ //TODO handle boost at embedded level: already stored in propertiesMedatada.boost
+
+ if ( value == null ) {
+ continue;
+ }
+ PropertiesMetadata embeddedMetadata = propertiesMetadata.embeddedPropertiesMetadata.get( i );
+ switch ( propertiesMetadata.embeddedContainers.get( i ) ) {
+ case ARRAY:
+ for ( Object arrayValue : ( Object[] ) value ) {
+ buildDocumentFields(
+ arrayValue, doc, embeddedMetadata, fieldToAnalyzerMap, processedFieldNames
+ );
+ }
+ break;
+ case COLLECTION:
+ for ( Object collectionValue : ( Collection ) value ) {
+ buildDocumentFields(
+ collectionValue, doc, embeddedMetadata, fieldToAnalyzerMap, processedFieldNames
+ );
+ }
+ break;
+ case MAP:
+ for ( Object collectionValue : ( ( Map ) value ).values() ) {
+ buildDocumentFields(
+ collectionValue, doc, embeddedMetadata, fieldToAnalyzerMap, processedFieldNames
+ );
+ }
+ break;
+ case OBJECT:
+ buildDocumentFields( value, doc, embeddedMetadata, fieldToAnalyzerMap, processedFieldNames );
+ break;
+ default:
+ throw new AssertionFailure(
+ "Unknown embedded container: "
+ + propertiesMetadata.embeddedContainers.get( i )
+ );
+ }
+ }
+ }
+
+ /**
+ * Allows an analyzer discriminator to override the analyzer used for any field in the Lucene document.
+ *
+ * @param doc The Lucene <code>Document</code> which shall be indexed.
+ * @param propertiesMetadata The metadata for the entity we currently add to the document.
+ * @param fieldToAnalyzerMap This map contains the actual override data. It is a map between document field names and
+ * analyzer definition names. This map will be added to the <code>Work</code> instance and processed at actual indexing time.
+ * @param processedFieldNames A list of field names we have already processed.
+ * @param unproxiedInstance The entity we currently "add" to the document.
+ */
+ private void allowAnalyzerDiscriminatorOverride(Document doc, PropertiesMetadata propertiesMetadata, Map<String, String> fieldToAnalyzerMap, Set<String> processedFieldNames, Object unproxiedInstance) {
+ Discriminator discriminator = propertiesMetadata.discriminator;
+ if ( discriminator == null ) {
+ return;
+ }
+
+ Object value = null;
+ if ( propertiesMetadata.discriminatorGetter != null ) {
+ value = ReflectionHelper.getMemberValue( unproxiedInstance, propertiesMetadata.discriminatorGetter );
+ }
+
+ // now we give the discriminator the opportunity to specify an analyzer per field
+ for ( Object o : doc.getFields() ) {
+ Field field = ( Field ) o;
+ if ( !processedFieldNames.contains( field.name() ) ) {
+ String analyzerName = discriminator.getAnalyzerDefinitionName( value, unproxiedInstance, field.name() );
+ if ( analyzerName != null ) {
+ fieldToAnalyzerMap.put( field.name(), analyzerName );
+ }
+ processedFieldNames.add( field.name() );
+ }
+ }
+ }
+
+ private Object unproxy(Object value) {
+ //FIXME this service should be part of Core?
+ if ( value instanceof HibernateProxy ) {
+ // .getImplementation() initializes the data by side effect
+ value = ( ( HibernateProxy ) value ).getHibernateLazyInitializer()
+ .getImplementation();
+ }
+ return value;
+ }
+
+ public String getIdentifierName() {
+ return idGetter.getName();
+ }
+
+ public DirectoryProvider[] getDirectoryProviders() {
+ if ( entityState != EntityState.INDEXED ) {
+ throw new AssertionFailure( "Contained in only entity: getDirectoryProvider should not have been called." );
+ }
+ return directoryProviders;
+ }
+
+ public IndexShardingStrategy getDirectoryProviderSelectionStrategy() {
+ if ( entityState != EntityState.INDEXED ) {
+ throw new AssertionFailure(
+ "Contained in only entity: getDirectoryProviderSelectionStrategy should not have been called."
+ );
+ }
+ return shardingStrategy;
+ }
+
+ public boolean allowFieldSelectionInProjection() {
+ return allowFieldSelectionInProjection;
+ }
+
+ /**
+ * @return <code>false</code> if there is a risk of a composite id. If the id is composite, use of (a, b) in ((1,2), (3,4)) fails on most databases.
+ */
+ public boolean isSafeFromTupleId() {
+ return safeFromTupleId;
+ }
+
+ public Term getTerm(Serializable id) {
+ if ( idProvided ) {
+ return new Term( idKeywordName, ( String ) id );
+ }
+
+ return new Term( idKeywordName, idBridge.objectToString( id ) );
+ }
+
+ public TwoWayFieldBridge getIdBridge() {
+ return idBridge;
+ }
+
+ public static Class getDocumentClass(Document document) {
+ String className = document.get( CLASS_FIELDNAME );
+ try {
+ return ReflectHelper.classForName( className );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException( "Unable to load indexed class: " + className, e );
+ }
+ }
+
+ public String getIdKeywordName() {
+ return idKeywordName;
+ }
+
+ /**
+ * Return the entity id if possible, throwing an IllegalStateException otherwise.
+ *
+ * If the id is provided externally, it cannot be extracted from the entity.
+ *
+ * @return entity id
+ */
+ public Serializable getId(Object entity) {
+ if ( entity == null || idGetter == null ) throw new IllegalStateException( "Cannot guess id from entity" );
+ return ( Serializable ) ReflectionHelper.getMemberValue( entity, idGetter );
+ }
+
+ public static Serializable getDocumentId(SearchFactoryImplementor searchFactoryImplementor, Class<?> clazz, Document document) {
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity = searchFactoryImplementor.getDocumentBuilderIndexedEntity(
+ clazz
+ );
+ if ( builderIndexedEntity == null ) {
+ throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ }
+ return ( Serializable ) builderIndexedEntity.getIdBridge()
+ .get( builderIndexedEntity.getIdKeywordName(), document );
+ }
+
+ public static Object[] getDocumentFields(SearchFactoryImplementor searchFactoryImplementor, Class<?> clazz, Document document, String[] fields) {
+ DocumentBuilderIndexedEntity<?> builderIndexedEntity = searchFactoryImplementor.getDocumentBuilderIndexedEntity(
+ clazz
+ );
+ if ( builderIndexedEntity == null ) {
+ throw new SearchException( "No Lucene configuration set up for: " + clazz.getName() );
+ }
+ final int fieldNbr = fields.length;
+ Object[] result = new Object[fieldNbr];
+
+ if ( builderIndexedEntity.idKeywordName != null ) {
+ populateResult(
+ builderIndexedEntity.idKeywordName,
+ builderIndexedEntity.idBridge,
+ Field.Store.YES,
+ fields,
+ result,
+ document
+ );
+ }
+
+ final PropertiesMetadata metadata = builderIndexedEntity.metadata;
+ processFieldsForProjection( metadata, fields, result, document );
+ return result;
+ }
+
+ private static void populateResult(String fieldName, FieldBridge fieldBridge, Field.Store store,
+ String[] fields, Object[] result, Document document) {
+ int matchingPosition = getFieldPosition( fields, fieldName );
+ if ( matchingPosition != -1 ) {
+ //TODO make use of an isTwoWay() method
+ if ( store != Field.Store.NO && TwoWayFieldBridge.class.isAssignableFrom( fieldBridge.getClass() ) ) {
+ result[matchingPosition] = ( ( TwoWayFieldBridge ) fieldBridge ).get( fieldName, document );
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Field {} projected as {}", fieldName, result[matchingPosition] );
+ }
+ }
+ else {
+ if ( store == Field.Store.NO ) {
+ throw new SearchException( "Projecting an unstored field: " + fieldName );
+ }
+ else {
+ throw new SearchException( "FieldBridge is not a TwoWayFieldBridge: " + fieldBridge.getClass() );
+ }
+ }
+ }
+ }
+
+ private static void processFieldsForProjection(PropertiesMetadata metadata, String[] fields, Object[] result, Document document) {
+ //process base fields
+ final int nbrOfEntityFields = metadata.fieldNames.size();
+ for ( int index = 0; index < nbrOfEntityFields; index++ ) {
+ populateResult(
+ metadata.fieldNames.get( index ),
+ metadata.fieldBridges.get( index ),
+ metadata.fieldStore.get( index ),
+ fields,
+ result,
+ document
+ );
+ }
+
+ //process fields of embedded
+ final int nbrOfEmbeddedObjects = metadata.embeddedPropertiesMetadata.size();
+ for ( int index = 0; index < nbrOfEmbeddedObjects; index++ ) {
+ //there is nothing we can do for collections
+ if ( metadata.embeddedContainers.get( index ) == PropertiesMetadata.Container.OBJECT ) {
+ processFieldsForProjection(
+ metadata.embeddedPropertiesMetadata.get( index ), fields, result, document
+ );
+ }
+ }
+
+ //process class bridges
+ final int nbrOfClassBridges = metadata.classBridges.size();
+ for ( int index = 0; index < nbrOfClassBridges; index++ ) {
+ populateResult(
+ metadata.classNames.get( index ),
+ metadata.classBridges.get( index ),
+ metadata.classStores.get( index ),
+ fields,
+ result,
+ document
+ );
+ }
+ }
+
+ private static int getFieldPosition(String[] fields, String fieldName) {
+ int fieldNbr = fields.length;
+ for ( int index = 0; index < fieldNbr; index++ ) {
+ if ( fieldName.equals( fields[index] ) ) {
+ return index;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Checks whether all involved bridges are two way string bridges. If so we can optimize document retrieval
+ * by using <code>FieldSelector</code>. See HSEARCH-213.
+ */
+ private void checkAllowFieldSelection() {
+ allowFieldSelectionInProjection = true;
+ if ( !( idBridge instanceof TwoWayStringBridge || idBridge instanceof TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ for ( FieldBridge bridge : metadata.fieldBridges ) {
+ if ( !( bridge instanceof TwoWayStringBridge || bridge instanceof TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ }
+ for ( FieldBridge bridge : metadata.classBridges ) {
+ if ( !( bridge instanceof TwoWayStringBridge || bridge instanceof TwoWayString2FieldBridgeAdaptor ) ) {
+ allowFieldSelectionInProjection = false;
+ return;
+ }
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentBuilderIndexedEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
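
A minimal usage sketch for the indexed-entity builder above, assuming a SearchFactoryImplementor
named searchFactory and a hypothetical @Indexed entity Book with instance book and identifier id:

DocumentBuilderIndexedEntity<Book> builder = searchFactory.getDocumentBuilderIndexedEntity( Book.class );
List<LuceneWork> queue = new ArrayList<LuceneWork>();
builder.addWorkToQueue( Book.class, book, id, WorkType.UPDATE, queue, searchFactory );
// the UPDATE path enqueues a DeleteLuceneWork followed by an AddLuceneWork for the same id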
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentExtractor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentExtractor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,146 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Set;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelectorResult;
+import org.apache.lucene.document.MapFieldSelector;
+import org.apache.lucene.document.FieldSelector;
+
+import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.query.QueryHits;
+
+/**
+ * Helper class to extract <code>EntityInfo</code>s out of the <code>QueryHits</code>.
+ *
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ * @author Hardy Ferentschik
+ */
+public class DocumentExtractor {
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final String[] projection;
+ private final QueryHits queryHits;
+ private FieldSelector fieldSelector;
+ private boolean allowFieldSelection;
+
+ public DocumentExtractor(QueryHits queryHits, SearchFactoryImplementor searchFactoryImplementor, String[] projection, Set<String> idFieldNames, boolean allowFieldSelection) {
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.projection = projection;
+ this.queryHits = queryHits;
+ this.allowFieldSelection = allowFieldSelection;
+ initFieldSelection( projection, idFieldNames );
+ }
+
+ private void initFieldSelection(String[] projection, Set<String> idFieldNames) {
+ // if we need to project DOCUMENT do not use fieldSelector as the user might want anything
+ int projectionSize = projection != null && projection.length != 0 ? projection.length : 0;
+ if ( projectionSize != 0 ) {
+ for ( String property : projection ) {
+ if ( ProjectionConstants.DOCUMENT.equals( property ) ) {
+ allowFieldSelection = false;
+ return;
+ }
+ }
+ }
+
+ // set up the field selector. CLASS_FIELDNAME and id fields are needed on top of any projected fields
+ Map<String, FieldSelectorResult> fields = new HashMap<String, FieldSelectorResult>( 1 + idFieldNames.size() + projectionSize );
+ fields.put( DocumentBuilder.CLASS_FIELDNAME, FieldSelectorResult.LOAD );
+ for ( String idFieldName : idFieldNames ) {
+ fields.put( idFieldName, FieldSelectorResult.LOAD );
+ }
+ if ( projectionSize != 0 ) {
+ for ( String projectedField : projection ) {
+ fields.put( projectedField, FieldSelectorResult.LOAD );
+ }
+ }
+ this.fieldSelector = new MapFieldSelector( fields );
+ }
+
+ private EntityInfo extract(Document document) {
+ Class clazz = DocumentBuilderIndexedEntity.getDocumentClass( document );
+ Serializable id = DocumentBuilderIndexedEntity.getDocumentId( searchFactoryImplementor, clazz, document );
+ Object[] projected = null;
+ if ( projection != null && projection.length > 0 ) {
+ projected = DocumentBuilderIndexedEntity.getDocumentFields(
+ searchFactoryImplementor, clazz, document, projection
+ );
+ }
+ return new EntityInfo( clazz, id, projected );
+ }
+
+ public EntityInfo extract(int index) throws IOException {
+ Document doc;
+ if ( allowFieldSelection ) {
+ doc = queryHits.doc( index, fieldSelector );
+ }
+ else {
+ doc = queryHits.doc( index );
+ }
+
+ EntityInfo entityInfo = extract( doc );
+ Object[] eip = entityInfo.projection;
+
+ // TODO - if we are only looking for score (unlikely), avoid accessing doc (lazy load)
+ if ( eip != null && eip.length > 0 ) {
+ for ( int x = 0; x < projection.length; x++ ) {
+ if ( ProjectionConstants.SCORE.equals( projection[x] ) ) {
+ eip[x] = queryHits.score( index );
+ }
+ else if ( ProjectionConstants.ID.equals( projection[x] ) ) {
+ eip[x] = entityInfo.id;
+ }
+ else if ( ProjectionConstants.DOCUMENT.equals( projection[x] ) ) {
+ eip[x] = doc;
+ }
+ else if ( ProjectionConstants.DOCUMENT_ID.equals( projection[x] ) ) {
+ eip[x] = queryHits.docId( index );
+ }
+ else if ( ProjectionConstants.BOOST.equals( projection[x] ) ) {
+ eip[x] = doc.getBoost();
+ }
+ else if ( ProjectionConstants.EXPLANATION.equals( projection[x] ) ) {
+ eip[x] = queryHits.explain( index );
+ }
+ else if ( ProjectionConstants.OBJECT_CLASS.equals( projection[x] ) ) {
+ eip[x] = entityInfo.clazz;
+ }
+ else if ( ProjectionConstants.THIS.equals( projection[x] ) ) {
+ //THIS could be projected more than once
+ //THIS loading delayed to the Loader phase
+ entityInfo.indexesOfThis.add( x );
+ }
+ }
+ }
+ return entityInfo;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/DocumentExtractor.java
___________________________________________________________________
Name: svn:keywords
+ Id
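A side note on the FieldSelector optimization used by DocumentExtractor: when field selection is allowed, only CLASS_FIELDNAME, the id fields and the projected fields are materialized from the index instead of the whole document. A standalone Lucene sketch of the same idea (the "id" and "title" field names are assumptions):

    Map<String, FieldSelectorResult> fieldMap = new HashMap<String, FieldSelectorResult>();
    fieldMap.put( DocumentBuilder.CLASS_FIELDNAME, FieldSelectorResult.LOAD );
    fieldMap.put( "id", FieldSelectorResult.LOAD );
    fieldMap.put( "title", FieldSelectorResult.LOAD );
    FieldSelector selector = new MapFieldSelector( fieldMap );
    // IndexReader.document(int, FieldSelector) loads only the selected stored fields
    Document partialDocument = indexReader.document( docId, selector );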
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityInfo.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityInfo.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityInfo.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,48 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ *
+ * @author Emmanuel Bernard
+ */
+public class EntityInfo {
+
+ public final Class clazz;
+ public final Serializable id;
+ public final Object[] projection;
+ public final List<Integer> indexesOfThis = new LinkedList<Integer>();
+
+ public EntityInfo(Class clazz, Serializable id, Object[] projection) {
+ this.clazz = clazz;
+ this.id = id;
+ this.projection = projection;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityInfo.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityState.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityState.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityState.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+/**
+ * Entity state with regard to indexing possibilities
+ *
+ * @author Emmanuel Bernard
+ */
+public enum EntityState {
+ INDEXED,
+ CONTAINED_IN_ONLY,
+ NON_INDEXABLE
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/EntityState.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/FilterDef.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/FilterDef.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/FilterDef.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,102 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.FilterCacheModeType;
+import org.hibernate.search.annotations.FullTextFilterDef;
+
+/**
+ * A wrapper class which encapsulates all required information to create a defined filter.
+ *
+ * @author Emmanuel Bernard
+ */
+//TODO serialization
+public class FilterDef {
+ private Method factoryMethod;
+ private Method keyMethod;
+ private Map<String, Method> setters = new HashMap<String, Method>();
+ private final FilterCacheModeType cacheMode;
+ private final Class<?> impl;
+ private final String name;
+
+ public FilterDef(FullTextFilterDef def) {
+ this.name = def.name();
+ this.impl = def.impl();
+ this.cacheMode = def.cache();
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public FilterCacheModeType getCacheMode() {
+ return cacheMode;
+ }
+
+ public Class<?> getImpl() {
+ return impl;
+ }
+
+ public Method getFactoryMethod() {
+ return factoryMethod;
+ }
+
+ public void setFactoryMethod(Method factoryMethod) {
+ this.factoryMethod = factoryMethod;
+ }
+
+ public Method getKeyMethod() {
+ return keyMethod;
+ }
+
+ public void setKeyMethod(Method keyMethod) {
+ this.keyMethod = keyMethod;
+ }
+
+ public void addSetter(String name, Method method) {
+ if ( !method.isAccessible() ) method.setAccessible( true );
+ setters.put( name, method );
+ }
+
+ public void invoke(String parameterName, Object filter, Object parameterValue) {
+ Method method = setters.get( parameterName );
+ if ( method == null ) throw new SearchException( "No setter " + parameterName + " found in " + this.impl );
+ try {
+ method.invoke( filter, parameterValue );
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to set Filter parameter: " + parameterName + " on filter class: " + this.impl, e );
+ }
+ catch (InvocationTargetException e) {
+ throw new SearchException( "Unable to set Filter parameter: " + parameterName + " on filter class: " + this.impl, e );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/FilterDef.java
___________________________________________________________________
Name: svn:keywords
+ Id
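FilterDef only records the metadata declared through @FullTextFilterDef; the setters collected in addSetter() are invoked reflectively when a filter is enabled on a query. A hedged sketch of the declaration side (SecurityFilterFactory, its "level" parameter and the "securityLevel" index field are made up for illustration):

    public class SecurityFilterFactory {
        private Integer level;

        // matched reflectively through FilterDef.addSetter( "level", ... )
        public void setLevel(Integer level) {
            this.level = level;
        }

        @Factory
        public Filter getFilter() {
            Query query = new TermQuery( new Term( "securityLevel", level.toString() ) );
            return new CachingWrapperFilter( new QueryWrapperFilter( query ) );
        }
    }

The filter is declared on an indexed entity with @FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class) and enabled on a query, which ends up calling FilterDef.invoke( "level", ... ):

    fullTextQuery.enableFullTextFilter( "security" ).setParameter( "level", 3 );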
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/Loader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/Loader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/Loader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.List;
+
+import org.hibernate.Session;
+import org.hibernate.search.engine.EntityInfo;
+
+/**
+ * Interface defining a set of operations in order to load entities which matched a query. Depending on the type of
+ * indexed entities and the type of query, different strategies can be used.
+ *
+ *
+ * @author Emmanuel Bernard
+ */
+public interface Loader {
+ void init(Session session, SearchFactoryImplementor searchFactoryImplementor);
+
+ Object load(EntityInfo entityInfo);
+
+ List load(EntityInfo... entityInfos);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/Loader.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LoaderHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LoaderHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LoaderHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class LoaderHelper {
+ private static final List<Class> objectNotFoundExceptions;
+
+ static {
+ objectNotFoundExceptions = new ArrayList<Class>(2);
+ try {
+ objectNotFoundExceptions.add(
+ ReflectHelper.classForName( "org.hibernate.ObjectNotFoundException" )
+ );
+ }
+ catch (ClassNotFoundException e) {
+ //leave it alone
+ }
+ try {
+ objectNotFoundExceptions.add(
+ ReflectHelper.classForName( "javax.persistence.EntityNotFoundException" )
+ );
+ }
+ catch (ClassNotFoundException e) {
+ //leave it alone
+ }
+ }
+
+ public static boolean isObjectNotFoundException(RuntimeException e) {
+ boolean objectNotFound = false;
+ Class exceptionClass = e.getClass();
+ for ( Class clazz : objectNotFoundExceptions) {
+ if ( clazz.isAssignableFrom( exceptionClass ) ) {
+ objectNotFound = true;
+ break;
+ }
+ }
+ return objectNotFound;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LoaderHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LuceneOptionsImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LuceneOptionsImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LuceneOptionsImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field.TermVector;
+
+import org.hibernate.search.bridge.LuceneOptions;
+
+/**
+ * A wrapper class for Lucene parameters needed for indexing.
+ * This is a package level class
+ *
+ * @author Hardy Ferentschik
+ */
+class LuceneOptionsImpl implements LuceneOptions {
+ private final Store store;
+ private final Index index;
+ private final TermVector termVector;
+ private final Float boost;
+
+ public LuceneOptionsImpl(Store store, Index index, TermVector termVector, Float boost) {
+ this.store = store;
+ this.index = index;
+ this.termVector = termVector;
+ this.boost = boost;
+ }
+
+ public Store getStore() {
+ return store;
+ }
+
+ public Index getIndex() {
+ return index;
+ }
+
+ public TermVector getTermVector() {
+ return termVector;
+ }
+
+ /**
+ * @return the boost value. If <code>boost == null</code>, the default boost value
+ * 1.0 is returned.
+ */
+ public Float getBoost() {
+ if ( boost != null ) {
+ return boost;
+ } else {
+ return 1.0f;
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/LuceneOptionsImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
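LuceneOptionsImpl is the object custom field bridges receive through the LuceneOptions interface. A minimal bridge sketch consuming it (the zero-padding format is an arbitrary example, not part of this change):

    public class PaddedIntegerBridge implements FieldBridge {

        public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
            String paddedValue = String.format( "%010d", (Integer) value );
            Field field = new Field(
                    name, paddedValue,
                    luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector()
            );
            field.setBoost( luceneOptions.getBoost() );
            document.add( field );
        }
    }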
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/MultiClassesQueryLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/MultiClassesQueryLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/MultiClassesQueryLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,146 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.hibernate.Criteria;
+import org.hibernate.Session;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MultiClassesQueryLoader implements Loader {
+ private Session session;
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private List<RootEntityMetadata> entityMatadata;
+ //useful if loading with a query is unsafe
+ private ObjectLoader objectLoader;
+
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
+ this.session = session;
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ this.objectLoader = new ObjectLoader();
+ this.objectLoader.init( session, searchFactoryImplementor );
+ }
+
+ public void setEntityTypes(Set<Class<?>> entityTypes) {
+ List<Class<?>> safeEntityTypes = new ArrayList<Class<?>>();
+ //TODO should we go find the root entity for a given class rather than just checking for its root status?
+ // root entity could lead to quite inefficient queries in Hibernate when using table per class
+ if ( entityTypes.size() == 0 ) {
+ //support all classes
+ for( Map.Entry<Class<?>, DocumentBuilderIndexedEntity<?>> entry : searchFactoryImplementor.getDocumentBuildersIndexedEntities().entrySet() ) {
+ //get only root entities to limit queries
+ if ( entry.getValue().isRoot() ) {
+ safeEntityTypes.add( entry.getKey() );
+ }
+ }
+ }
+ else {
+ safeEntityTypes.addAll(entityTypes);
+ }
+ entityMatadata = new ArrayList<RootEntityMetadata>( safeEntityTypes.size() );
+ for (Class clazz : safeEntityTypes) {
+ entityMatadata.add( new RootEntityMetadata( clazz, searchFactoryImplementor, session ) );
+ }
+ }
+
+ public Object load(EntityInfo entityInfo) {
+ return ObjectLoaderHelper.load( entityInfo, session );
+ }
+
+ public List load(EntityInfo... entityInfos) {
+ if ( entityInfos.length == 0 ) return Collections.EMPTY_LIST;
+ if ( entityInfos.length == 1 ) {
+ final Object entity = load( entityInfos[0] );
+ if ( entity == null ) {
+ return Collections.EMPTY_LIST;
+ }
+ else {
+ final List<Object> list = new ArrayList<Object>( 1 );
+ list.add( entity );
+ return list;
+ }
+ }
+
+ //split EntityInfo per root entity
+ Map<RootEntityMetadata, List<EntityInfo>> entityinfoBuckets =
+ new HashMap<RootEntityMetadata, List<EntityInfo>>( entityMatadata.size());
+ for (EntityInfo entityInfo : entityInfos) {
+ boolean found = false;
+ for (RootEntityMetadata rootEntityInfo : entityMatadata) {
+ if ( rootEntityInfo.rootEntity == entityInfo.clazz || rootEntityInfo.mappedSubclasses.contains( entityInfo.clazz ) ) {
+ List<EntityInfo> bucket = entityinfoBuckets.get( rootEntityInfo );
+ if ( bucket == null ) {
+ bucket = new ArrayList<EntityInfo>();
+ entityinfoBuckets.put( rootEntityInfo, bucket );
+ }
+ bucket.add( entityInfo );
+ found = true;
+ break; //we stop looping for the right bucket
+ }
+ }
+ if (!found) throw new AssertionFailure( "Could not find root entity for " + entityInfo.clazz );
+ }
+
+ //initialize objects by bucket
+ for ( Map.Entry<RootEntityMetadata, List<EntityInfo>> entry : entityinfoBuckets.entrySet() ) {
+ final RootEntityMetadata key = entry.getKey();
+ final List<EntityInfo> value = entry.getValue();
+ final EntityInfo[] bucketEntityInfos = value.toArray( new EntityInfo[value.size()] );
+ if ( key.useObjectLoader ) {
+ objectLoader.load( bucketEntityInfos );
+ }
+ else {
+ ObjectLoaderHelper.initializeObjects( bucketEntityInfos,
+ key.criteria, key.rootEntity, searchFactoryImplementor);
+ }
+ }
+ return ObjectLoaderHelper.returnAlreadyLoadedObjectsInCorrectOrder( entityInfos, session );
+ }
+
+ private static class RootEntityMetadata {
+ public final Class<?> rootEntity;
+ public final Set<Class<?>> mappedSubclasses;
+ private final Criteria criteria;
+ public final boolean useObjectLoader;
+
+ RootEntityMetadata(Class<?> rootEntity, SearchFactoryImplementor searchFactoryImplementor, Session session) {
+ this.rootEntity = rootEntity;
+ DocumentBuilderIndexedEntity<?> provider = searchFactoryImplementor.getDocumentBuilderIndexedEntity( rootEntity );
+ if ( provider == null) throw new AssertionFailure("Provider not found for class: " + rootEntity);
+ this.mappedSubclasses = provider.getMappedSubclasses();
+ this.criteria = session.createCriteria( rootEntity );
+ this.useObjectLoader = !provider.isSafeFromTupleId();
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/MultiClassesQueryLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.slf4j.Logger;
+
+import org.hibernate.Hibernate;
+import org.hibernate.Session;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ObjectLoader implements Loader {
+ private static final Logger log = LoggerFactory.make();
+ private Session session;
+
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
+ this.session = session;
+ }
+
+ public Object load(EntityInfo entityInfo) {
+ return ObjectLoaderHelper.load( entityInfo, session );
+ }
+
+ public List load(EntityInfo... entityInfos) {
+ if ( entityInfos.length == 0 ) return Collections.EMPTY_LIST;
+ if ( entityInfos.length == 1 ) {
+ final Object entity = load( entityInfos[0] );
+ if ( entity == null ) {
+ return Collections.EMPTY_LIST;
+ }
+ else {
+ final List<Object> list = new ArrayList<Object>( 1 );
+ list.add( entity );
+ return list;
+ }
+ }
+ //use load to benefit from the batch-size
+ //we don't face proxy casting issues since the exact class is extracted from the index
+ for (EntityInfo entityInfo : entityInfos) {
+ session.load( entityInfo.clazz, entityInfo.id );
+ }
+ List result = new ArrayList( entityInfos.length );
+ for (EntityInfo entityInfo : entityInfos) {
+ try {
+ Object entity = session.load( entityInfo.clazz, entityInfo.id );
+ Hibernate.initialize( entity );
+ result.add( entity );
+ }
+ catch (RuntimeException e) {
+ if ( LoaderHelper.isObjectNotFoundException( e ) ) {
+ log.debug( "Object found in Search index but not in database: {} with id {}",
+ entityInfo.clazz, entityInfo.id );
+ }
+ else {
+ throw e;
+ }
+ }
+ }
+ return result;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
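The two-pass loop in ObjectLoader.load(EntityInfo...) above (create all proxies first, initialize them afterwards) only pays off when batch fetching is configured; otherwise each Hibernate.initialize() still issues its own SELECT. An assumed configuration enabling it (property and annotation are standard Hibernate, the value 16 is arbitrary):

    // globally, before building the SessionFactory
    Configuration cfg = new Configuration();
    cfg.setProperty( "hibernate.default_batch_fetch_size", "16" );
    // or per entity: annotate the indexed class with @BatchSize(size = 16)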
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoaderHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoaderHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoaderHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,114 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.slf4j.Logger;
+
+import org.hibernate.Criteria;
+import org.hibernate.Hibernate;
+import org.hibernate.Session;
+import org.hibernate.criterion.Disjunction;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ObjectLoaderHelper {
+
+ private static final int MAX_IN_CLAUSE = 500;
+ private static final Logger log = LoggerFactory.make();
+
+ public static Object load(EntityInfo entityInfo, Session session) {
+ //be sure to get an initialized object, but stay safe from ONFE and ENFE
+ Object maybeProxy = session.load( entityInfo.clazz, entityInfo.id );
+ try {
+ Hibernate.initialize( maybeProxy );
+ }
+ catch (RuntimeException e) {
+ if ( LoaderHelper.isObjectNotFoundException( e ) ) {
+ log.debug( "Object found in Search index but not in database: {} with id {}",
+ entityInfo.clazz, entityInfo.id );
+ maybeProxy = null;
+ }
+ else {
+ throw e;
+ }
+ }
+ return maybeProxy;
+ }
+
+ public static void initializeObjects(EntityInfo[] entityInfos, Criteria criteria, Class<?> entityType,
+ SearchFactoryImplementor searchFactoryImplementor) {
+ final int maxResults = entityInfos.length;
+ if ( maxResults == 0 ) return;
+
+ Set<Class<?>> indexedEntities = searchFactoryImplementor.getIndexedTypesPolymorphic( new Class<?>[]{entityType} );
+ DocumentBuilderIndexedEntity<?> builder = searchFactoryImplementor.getDocumentBuilderIndexedEntity( indexedEntities.iterator().next() );
+ String idName = builder.getIdentifierName();
+ int loop = maxResults / MAX_IN_CLAUSE;
+ boolean exact = maxResults % MAX_IN_CLAUSE == 0;
+ if ( !exact ) loop++;
+ Disjunction disjunction = Restrictions.disjunction();
+ for (int index = 0; index < loop; index++) {
+ int max = index * MAX_IN_CLAUSE + MAX_IN_CLAUSE <= maxResults ?
+ index * MAX_IN_CLAUSE + MAX_IN_CLAUSE :
+ maxResults;
+ List<Serializable> ids = new ArrayList<Serializable>( max - index * MAX_IN_CLAUSE );
+ for (int entityInfoIndex = index * MAX_IN_CLAUSE; entityInfoIndex < max; entityInfoIndex++) {
+ ids.add( entityInfos[entityInfoIndex].id );
+ }
+ disjunction.add( Restrictions.in( idName, ids ) );
+ }
+ criteria.add( disjunction );
+ criteria.list(); //load all objects
+ }
+
+
+ public static List returnAlreadyLoadedObjectsInCorrectOrder(EntityInfo[] entityInfos, Session session) {
+ //mandatory to keep the same ordering
+ List result = new ArrayList( entityInfos.length );
+ for (EntityInfo entityInfo : entityInfos) {
+ Object element = session.load( entityInfo.clazz, entityInfo.id );
+ if ( Hibernate.isInitialized( element ) ) {
+ //all existing elements should have been loaded by the query,
+ //any still uninitialized ones are missing from the database
+ result.add( element );
+ }
+ else {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Object found in Search index but not in database: {} with {}",
+ entityInfo.clazz, entityInfo.id );
+ }
+ }
+ }
+ return result;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ObjectLoaderHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
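The IN-clause splitting in initializeObjects() keeps each restriction at MAX_IN_CLAUSE (500) identifiers, so for example 1234 matches produce three OR-ed IN groups of 500, 500 and 234 ids. A standalone sketch of the same chunking (the "id" property name and the ids list are assumptions):

    int maxInClause = 500;
    int total = ids.size();
    int loops = total / maxInClause + ( total % maxInClause == 0 ? 0 : 1 );
    Disjunction disjunction = Restrictions.disjunction();
    for ( int i = 0; i < loops; i++ ) {
        int to = Math.min( ( i + 1 ) * maxInClause, total );
        disjunction.add( Restrictions.in( "id", ids.subList( i * maxInClause, to ) ) );
    }
    criteria.add( disjunction ).list(); // one criteria query loading all matched entities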
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ProjectionLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ProjectionLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ProjectionLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,119 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.hibernate.Session;
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * Implementation of the <code>Loader</code> interface used for loading entities which are projected via
+ * {@link org.hibernate.search.ProjectionConstants#THIS}.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class ProjectionLoader implements Loader {
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private Session session;
+ private Loader objectLoader;
+ private Boolean projectThis;
+ private ResultTransformer transformer;
+ private String[] aliases;
+ private Set<Class<?>> entityTypes;
+
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
+ this.session = session;
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ }
+
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor, ResultTransformer transformer, String[] aliases) {
+ init( session, searchFactoryImplementor );
+ this.transformer = transformer;
+ this.aliases = aliases;
+ }
+
+ public void setEntityTypes(Set<Class<?>> entityTypes) {
+ this.entityTypes = entityTypes;
+ }
+
+ public Object load(EntityInfo entityInfo) {
+ initThisProjectionFlag( entityInfo );
+ if ( projectThis ) {
+ for ( int index : entityInfo.indexesOfThis ) {
+ entityInfo.projection[index] = objectLoader.load( entityInfo );
+ }
+ }
+ if ( transformer != null ) {
+ return transformer.transformTuple( entityInfo.projection, aliases );
+ }
+ else {
+ return entityInfo.projection;
+ }
+ }
+
+ private void initThisProjectionFlag(EntityInfo entityInfo) {
+ if ( projectThis == null ) {
+ projectThis = entityInfo.indexesOfThis.size() != 0;
+ if ( projectThis ) {
+ MultiClassesQueryLoader loader = new MultiClassesQueryLoader();
+ loader.init( session, searchFactoryImplementor );
+ loader.setEntityTypes( entityTypes );
+ objectLoader = loader;
+ }
+ }
+ }
+
+ public List load(EntityInfo... entityInfos) {
+ List results = new ArrayList( entityInfos.length );
+ if ( entityInfos.length == 0 ) {
+ return results;
+ }
+
+ initThisProjectionFlag( entityInfos[0] );
+ if ( projectThis ) {
+ objectLoader.load( entityInfos ); // load by batch
+ for ( EntityInfo entityInfo : entityInfos ) {
+ for ( int index : entityInfo.indexesOfThis ) {
+ // set one by one to avoid losing null objects (skipped in the objectLoader.load( EntityInfo[] ))
+ entityInfo.projection[index] = objectLoader.load( entityInfo );
+ }
+ }
+ }
+ for ( EntityInfo entityInfo : entityInfos ) {
+ if ( transformer != null ) {
+ results.add( transformer.transformTuple( entityInfo.projection, aliases ) );
+ }
+ else {
+ results.add( entityInfo.projection );
+ }
+ }
+
+ return results;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/ProjectionLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
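ProjectionLoader comes into play when ProjectionConstants.THIS is part of the projection: the managed entities are loaded in batch and slotted back into the projected rows, optionally run through a ResultTransformer. A usage sketch (the Book entity and its "title" field are hypothetical):

    FullTextQuery query = fullTextSession.createFullTextQuery( luceneQuery, Book.class );
    query.setProjection( FullTextQuery.THIS, "title" );
    query.setResultTransformer( new BasicTransformerAdapter() {
        public Object transformTuple(Object[] tuple, String[] aliases) {
            // tuple[0] is the managed Book instance, tuple[1] the stored title
            return tuple[1] + " -> " + tuple[0];
        }
    } );
    List results = query.list();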
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/QueryLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/QueryLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/QueryLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.List;
+import java.util.Collections;
+
+import org.hibernate.Criteria;
+import org.hibernate.Session;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class QueryLoader implements Loader {
+
+ private Session session;
+ private Class entityType;
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private Criteria criteria;
+ private boolean isExplicitCriteria;
+
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
+ this.session = session;
+ this.searchFactoryImplementor = searchFactoryImplementor;
+ }
+
+ public void setEntityType(Class entityType) {
+ this.entityType = entityType;
+ }
+
+ public Object load(EntityInfo entityInfo) {
+ //if explicit criteria, make sure to use it to load the objects
+ if ( isExplicitCriteria ) load( new EntityInfo[] { entityInfo } );
+ return ObjectLoaderHelper.load( entityInfo, session );
+ }
+
+ public List load(EntityInfo... entityInfos) {
+ if ( entityInfos.length == 0 ) return Collections.EMPTY_LIST;
+ if ( entityType == null ) throw new AssertionFailure( "EntityType not defined" );
+ if ( criteria == null ) criteria = session.createCriteria( entityType );
+
+ ObjectLoaderHelper.initializeObjects( entityInfos, criteria, entityType, searchFactoryImplementor );
+ return ObjectLoaderHelper.returnAlreadyLoadedObjectsInCorrectOrder( entityInfos, session );
+ }
+
+ public void setCriteria(Criteria criteria) {
+ isExplicitCriteria = criteria != null;
+ this.criteria = criteria;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/QueryLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/SearchFactoryImplementor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/SearchFactoryImplementor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/SearchFactoryImplementor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.engine;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.search.Similarity;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneIndexingParameters;
+import org.hibernate.search.backend.Worker;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.filter.FilterCachingStrategy;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.optimization.OptimizerStrategy;
+
+/**
+ * Interface which gives access to the different directory providers and their configuration.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public interface SearchFactoryImplementor extends SearchFactory {
+ BackendQueueProcessorFactory getBackendQueueProcessorFactory();
+
+ void setBackendQueueProcessorFactory(BackendQueueProcessorFactory backendQueueProcessorFactory);
+
+ Map<Class<?>, DocumentBuilderIndexedEntity<?>> getDocumentBuildersIndexedEntities();
+
+ <T> DocumentBuilderIndexedEntity<T> getDocumentBuilderIndexedEntity(Class<T> entityType);
+
+ <T> DocumentBuilderContainedEntity<T> getDocumentBuilderContainedEntity(Class<T> entityType);
+
+ Worker getWorker();
+
+ void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy);
+
+ OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider);
+
+ FilterCachingStrategy getFilterCachingStrategy();
+
+ FilterDef getFilterDefinition(String name);
+
+ LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider);
+
+ void addIndexingParameters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams);
+
+ String getIndexingStrategy();
+
+ void close();
+
+ void addClassToDirectoryProvider(Class<?> clazz, DirectoryProvider<?> directoryProvider, boolean exclusiveIndexUsage);
+
+ Set<Class<?>> getClassesInDirectoryProvider(DirectoryProvider<?> directoryProvider);
+
+ Set<DirectoryProvider<?>> getDirectoryProviders();
+
+ ReentrantLock getDirectoryProviderLock(DirectoryProvider<?> dp);
+
+ void addDirectoryProvider(DirectoryProvider<?> provider, boolean exclusiveIndexUsage);
+
+ int getFilterCacheBitResultsSize();
+
+ Set<Class<?>> getIndexedTypesPolymorphic(Class<?>[] classes);
+
+ BatchBackend makeBatchBackend(MassIndexerProgressMonitor progressMonitor);
+
+ Similarity getSimilarity(DirectoryProvider<?> directoryProvider);
+
+ boolean isExclusiveIndexUsageEnabled(DirectoryProvider<?> provider);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/engine/SearchFactoryImplementor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/ContextHolder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/ContextHolder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/ContextHolder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,58 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.event;
+
+import java.util.WeakHashMap;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
+import org.hibernate.search.impl.SearchFactoryImpl;
+
+/**
+ * Holds already built SearchFactory instances per Hibernate Configuration object;
+ * concurrent threads do not share this information.
+ *
+ * @author Emmanuel Bernard
+ */
+public class ContextHolder {
+ private static final ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>> contexts =
+ new ThreadLocal<WeakHashMap<Configuration, SearchFactoryImpl>>();
+
+ //this code does not have to be thread-safe because SessionFactory creation is single-threaded
+ //this is not a public API; it should only be used while the SessionFactory is being built
+ public static SearchFactoryImpl getOrBuildSearchFactory(Configuration cfg) {
+ WeakHashMap<Configuration, SearchFactoryImpl> contextMap = contexts.get();
+ if ( contextMap == null ) {
+ contextMap = new WeakHashMap<Configuration, SearchFactoryImpl>( 2 );
+ contexts.set( contextMap );
+ }
+ SearchFactoryImpl searchFactory = contextMap.get( cfg );
+ if ( searchFactory == null ) {
+ searchFactory = new SearchFactoryImpl( new SearchConfigurationFromHibernateCore( cfg ) );
+ contextMap.put( cfg, searchFactory );
+ }
+ return searchFactory;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/ContextHolder.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/EventListenerRegister.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/EventListenerRegister.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/EventListenerRegister.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,193 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.event;
+
+import java.util.Properties;
+
+import org.slf4j.Logger;
+
+import org.hibernate.event.EventListeners;
+import org.hibernate.event.FlushEventListener;
+import org.hibernate.event.PostCollectionRecreateEventListener;
+import org.hibernate.event.PostCollectionRemoveEventListener;
+import org.hibernate.event.PostCollectionUpdateEventListener;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Helper methods initializing Hibernate Search event listeners.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+public class EventListenerRegister {
+
+ private static final Logger log = LoggerFactory.make();
+
+ /**
+ * Add the FullTextIndexEventListener to all listeners, if enabled in configuration
+ * and if not already registered.
+ *
+ * @param listeners the Hibernate event listener registry to enrich
+ * @param properties the Search configuration
+ */
+ public static void enableHibernateSearch(EventListeners listeners, Properties properties) {
+ // check whether search is explicitly disabled - if so there is nothing to do
+ String enableSearchListeners = properties.getProperty( Environment.AUTOREGISTER_LISTENERS );
+ if ( "false".equalsIgnoreCase( enableSearchListeners ) ) {
+ log.info(
+ "Property hibernate.search.autoregister_listeners is set to false." +
+ " No attempt will be made to register Hibernate Search event listeners."
+ );
+ return;
+ }
+ final FullTextIndexEventListener searchListener = new FullTextIndexEventListener();
+ // PostInsertEventListener
+ listeners.setPostInsertEventListeners(
+ addIfNeeded(
+ listeners.getPostInsertEventListeners(),
+ searchListener,
+ new PostInsertEventListener[] { searchListener }
+ )
+ );
+ // PostUpdateEventListener
+ listeners.setPostUpdateEventListeners(
+ addIfNeeded(
+ listeners.getPostUpdateEventListeners(),
+ searchListener,
+ new PostUpdateEventListener[] { searchListener }
+ )
+ );
+ // PostDeleteEventListener
+ listeners.setPostDeleteEventListeners(
+ addIfNeeded(
+ listeners.getPostDeleteEventListeners(),
+ searchListener,
+ new PostDeleteEventListener[] { searchListener }
+ )
+ );
+
+ // PostCollectionRecreateEventListener
+ listeners.setPostCollectionRecreateEventListeners(
+ addIfNeeded(
+ listeners.getPostCollectionRecreateEventListeners(),
+ searchListener,
+ new PostCollectionRecreateEventListener[] { searchListener }
+ )
+ );
+ // PostCollectionRemoveEventListener
+ listeners.setPostCollectionRemoveEventListeners(
+ addIfNeeded(
+ listeners.getPostCollectionRemoveEventListeners(),
+ searchListener,
+ new PostCollectionRemoveEventListener[] { searchListener }
+ )
+ );
+ // PostCollectionUpdateEventListener
+ listeners.setPostCollectionUpdateEventListeners(
+ addIfNeeded(
+ listeners.getPostCollectionUpdateEventListeners(),
+ searchListener,
+ new PostCollectionUpdateEventListener[] { searchListener }
+ )
+ );
+ // Adding also as FlushEventListener to manage events out-of-transaction
+ listeners.setFlushEventListeners(
+ addIfNeeded(
+ listeners.getFlushEventListeners(),
+ searchListener,
+ new FlushEventListener[] { searchListener }
+ )
+ );
+ }
+
+ /**
+ * Verifies whether a Search listener is already present; if not, it returns
+ * a grown array with the listener appended to it.
+ *
+ * @param <T> the type of listeners
+ * @param listeners
+ * @param searchEventListener
+ * @param toUseOnNull this is returned if listeners==null
+ *
+ * @return the listener array guaranteed to contain the Search listener: either the original array or a grown copy
+ */
+ private static <T> T[] addIfNeeded(T[] listeners, T searchEventListener, T[] toUseOnNull) {
+ if ( listeners == null ) {
+ return toUseOnNull;
+ }
+ else if ( !isPresentInListeners( listeners ) ) {
+ return appendToArray( listeners, searchEventListener );
+ }
+ else {
+ return listeners;
+ }
+ }
+
+ /**
+ * Will add one element to the end of an array.
+ *
+ * @param <T> The array type
+ * @param listeners The original array
+ * @param newElement The element to be added
+ *
+ * @return A new array containing all listeners and newElement.
+ */
+ @SuppressWarnings("unchecked")
+ private static <T> T[] appendToArray(T[] listeners, T newElement) {
+ int length = listeners.length;
+ T[] ret = ( T[] ) java.lang.reflect.Array.newInstance(
+ listeners.getClass().getComponentType(), length + 1
+ );
+ System.arraycopy( listeners, 0, ret, 0, length );
+ ret[length] = newElement;
+ return ret;
+ }
+
+ /**
+ * Verifies if a FullTextIndexEventListener is contained in the array.
+ *
+ * @param listeners the array of registered listeners
+ *
+ * @return true if a Search event listener is already contained in the array.
+ */
+ @SuppressWarnings("deprecation")
+ private static boolean isPresentInListeners(Object[] listeners) {
+ for ( Object eventListener : listeners ) {
+ if ( FullTextIndexEventListener.class == eventListener.getClass() ) {
+ return true;
+ }
+ if ( FullTextIndexCollectionEventListener.class == eventListener.getClass() ) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/EventListenerRegister.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
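
For context, a minimal usage sketch of the registration helper added above, assuming a plain Configuration-based bootstrap; the SearchBootstrapSketch class itself is hypothetical, while the property name matches the one logged by enableHibernateSearch().

import java.util.Properties;
import org.hibernate.cfg.Configuration;
import org.hibernate.search.event.EventListenerRegister;

public class SearchBootstrapSketch {

    public static void register(Configuration cfg) {
        Properties props = cfg.getProperties();
        // Uncommenting the next line would make enableHibernateSearch() a no-op,
        // as logged by the method above:
        // props.setProperty( "hibernate.search.autoregister_listeners", "false" );

        // Safe to call more than once: addIfNeeded()/isPresentInListeners()
        // prevent duplicate registration of the Search listener.
        EventListenerRegister.enableHibernateSearch( cfg.getEventListeners(), props );
        EventListenerRegister.enableHibernateSearch( cfg.getEventListeners(), props );
    }
}
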
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,68 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.event;
+
+import org.hibernate.event.PostCollectionRecreateEvent;
+import org.hibernate.event.PostCollectionRecreateEventListener;
+import org.hibernate.event.PostCollectionRemoveEvent;
+import org.hibernate.event.PostCollectionRemoveEventListener;
+import org.hibernate.event.PostCollectionUpdateEvent;
+import org.hibernate.event.PostCollectionUpdateEventListener;
+
+/**
+ * @author Emmanuel Bernard
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener}
+ */
+@SuppressWarnings("serial")
+@Deprecated
+public class FullTextIndexCollectionEventListener extends FullTextIndexEventListener
+ implements PostCollectionRecreateEventListener,
+ PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener {
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostRecreateCollection(PostCollectionRecreateEvent)}
+ */
+ @Deprecated
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
+ processCollectionEvent( event );
+ }
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostRemoveCollection(PostCollectionRemoveEvent)}
+ */
+ @Deprecated
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
+ processCollectionEvent( event );
+ }
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link FullTextIndexEventListener#onPostUpdateCollection(PostCollectionUpdateEvent)}
+ */
+ @Deprecated
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
+ processCollectionEvent( event );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexCollectionEventListener.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexEventListener.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexEventListener.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,258 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.event;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.lang.reflect.Field;
+import java.util.Map;
+
+import javax.transaction.Status;
+import javax.transaction.Synchronization;
+
+import org.slf4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.engine.EntityEntry;
+import org.hibernate.event.AbstractCollectionEvent;
+import org.hibernate.event.AbstractEvent;
+import org.hibernate.event.Destructible;
+import org.hibernate.event.EventSource;
+import org.hibernate.event.FlushEvent;
+import org.hibernate.event.FlushEventListener;
+import org.hibernate.event.Initializable;
+import org.hibernate.event.PostCollectionRecreateEvent;
+import org.hibernate.event.PostCollectionRecreateEventListener;
+import org.hibernate.event.PostCollectionRemoveEvent;
+import org.hibernate.event.PostCollectionRemoveEventListener;
+import org.hibernate.event.PostCollectionUpdateEvent;
+import org.hibernate.event.PostCollectionUpdateEventListener;
+import org.hibernate.event.PostDeleteEvent;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEvent;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEvent;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.impl.EventSourceTransactionContext;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.WeakIdentityHashMap;
+
+/**
+ * This listener supports setting a parent directory for all generated index files.
+ * It also supports setting the analyzer class to be used.
+ *
+ * @author Gavin King
+ * @author Emmanuel Bernard
+ * @author Mattias Arbin
+ * @author Sanne Grinovero
+ */
+//TODO implement and use a LockableDirectoryProvider that wraps a DP to handle the lock inside the LDP
+//TODO make this class final as soon as FullTextIndexCollectionEventListener is removed.
+@SuppressWarnings( "serial" )
+public class FullTextIndexEventListener implements PostDeleteEventListener,
+ PostInsertEventListener, PostUpdateEventListener,
+ PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener, FlushEventListener, Initializable, Destructible {
+
+ private static final Logger log = LoggerFactory.make();
+
+ protected boolean used;
+ protected SearchFactoryImplementor searchFactoryImplementor;
+
+ //only used by the FullTextIndexEventListener instance playing in the FlushEventListener role.
+ // transient because it's not serializable (and state doesn't need to live longer than a flush).
+ // final because its initialization should be safely published to other threads.
+ // ! update the readObject() method in case of name changes !
+ // make sure the Synchronization doesn't contain references to Session, otherwise we'll leak memory.
+ private transient final Map<Session,Synchronization> flushSynch = new WeakIdentityHashMap<Session,Synchronization>(0);
+
+ /**
+ * Initialize method called by Hibernate Core when the SessionFactory starts
+ */
+ public void initialize(Configuration cfg) {
+ searchFactoryImplementor = ContextHolder.getOrBuildSearchFactory( cfg );
+ String indexingStrategy = searchFactoryImplementor.getIndexingStrategy();
+ if ( "event".equals( indexingStrategy ) ) {
+ used = searchFactoryImplementor.getDocumentBuildersIndexedEntities().size() != 0;
+ }
+ else if ( "manual".equals( indexingStrategy ) ) {
+ used = false;
+ }
+ }
+
+ public SearchFactoryImplementor getSearchFactoryImplementor() {
+ return searchFactoryImplementor;
+ }
+
+ public void onPostDelete(PostDeleteEvent event) {
+ if ( used ) {
+ final Class<?> entityType = event.getEntity().getClass();
+ if ( searchFactoryImplementor.getDocumentBuildersIndexedEntities().containsKey( entityType )
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entityType ) != null ) {
+ processWork( event.getEntity(), event.getId(), WorkType.DELETE, event );
+ }
+ }
+ }
+
+ public void onPostInsert(PostInsertEvent event) {
+ if ( used ) {
+ final Object entity = event.getEntity();
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) != null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() ) != null ) {
+ Serializable id = event.getId();
+ processWork( entity, id, WorkType.ADD, event );
+ }
+ }
+ }
+
+ public void onPostUpdate(PostUpdateEvent event) {
+ if ( used ) {
+ final Object entity = event.getEntity();
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) != null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() ) != null ) {
+ Serializable id = event.getId();
+ processWork( entity, id, WorkType.UPDATE, event );
+ }
+ }
+ }
+
+ protected <T> void processWork(T entity, Serializable id, WorkType workType, AbstractEvent event) {
+ Work<T> work = new Work<T>( entity, id, workType );
+ final EventSourceTransactionContext transactionContext = new EventSourceTransactionContext( event.getSession() );
+ searchFactoryImplementor.getWorker().performWork( work, transactionContext );
+ }
+
+ public void cleanup() {
+ searchFactoryImplementor.close();
+ }
+
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
+ processCollectionEvent( event );
+ }
+
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
+ processCollectionEvent( event );
+ }
+
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
+ processCollectionEvent( event );
+ }
+
+ protected void processCollectionEvent(AbstractCollectionEvent event) {
+ Object entity = event.getAffectedOwnerOrNull();
+ if ( entity == null ) {
+ //Hibernate cannot always determine the collection owner, especially when detached objects
+ // or property-ref mappings are involved
+ //We should really log this, but we don't know whether this collection is of interest for indexing
+ return;
+ }
+ if ( used ) {
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( entity.getClass() ) != null
+ || searchFactoryImplementor.getDocumentBuilderContainedEntity( entity.getClass() ) != null ) {
+ Serializable id = getId( entity, event );
+ if ( id == null ) {
+ log.warn(
+ "Unable to reindex entity on collection change, id cannot be extracted: {}",
+ event.getAffectedOwnerEntityName()
+ );
+ return;
+ }
+ processWork( entity, id, WorkType.COLLECTION, event );
+ }
+ }
+ }
+
+ private Serializable getId(Object entity, AbstractCollectionEvent event) {
+ Serializable id = event.getAffectedOwnerIdOrNull();
+ if ( id == null ) {
+ //most likely this recovery is unnecessary since Hibernate Core probably tries that already
+ EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( entity );
+ id = entityEntry == null ? null : entityEntry.getId();
+ }
+ return id;
+ }
+
+ /**
+ * Make sure the indexes are updated right after the hibernate flush,
+ * avoiding object loading during a flush. Not needed during transactions.
+ */
+ public void onFlush(FlushEvent event) {
+ if ( used ) {
+ Session session = event.getSession();
+ Synchronization synchronization = flushSynch.get( session );
+ if ( synchronization != null ) {
+ //first cleanup
+ flushSynch.remove( session );
+ log.debug( "flush event causing index update out of transaction" );
+ synchronization.beforeCompletion();
+ synchronization.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+ }
+
+ /**
+ * Adds a synchronization to be performed in the onFlush method;
+ * should only be used as a workaround for the case in which a flush happens
+ * outside of a transaction.
+ * Warning: if the synchronization contains a hard reference
+ * to the Session, proper cleanup is not guaranteed and memory leaks
+ * will happen.
+ * @param eventSource should be the Session doing the flush
+ * @param synchronization the Synchronization to invoke on the next flush of that Session
+ */
+ public void addSynchronization(EventSource eventSource, Synchronization synchronization) {
+ this.flushSynch.put( eventSource, synchronization );
+ }
+
+ /* Might want to implement AutoFlushEventListener in future?
+ public void onAutoFlush(AutoFlushEvent event) throws HibernateException {
+ // Currently not needed as auto-flush is not happening
+ // when out of transaction.
+ }
+ */
+
+ private void writeObject(ObjectOutputStream os) throws IOException {
+ os.defaultWriteObject();
+ }
+
+ //needs to implement custom readObject to restore the transient fields
+ private void readObject(ObjectInputStream is) throws IOException, ClassNotFoundException, SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
+ is.defaultReadObject();
+ Class<FullTextIndexEventListener> cl = FullTextIndexEventListener.class;
+ Field f = cl.getDeclaredField("flushSynch");
+ f.setAccessible( true );
+ Map<Session,Synchronization> flushSynch = new WeakIdentityHashMap<Session,Synchronization>(0);
+ // setting a final field by reflection during a readObject is considered as safe as in a constructor:
+ f.set( this, flushSynch );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/event/FullTextIndexEventListener.java
___________________________________________________________________
Name: svn:keywords
+ Id
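
A sketch of how the used flag computed in initialize() above behaves in practice: with the default "event" indexing strategy the listener indexes on inserts, updates and deletes, while a "manual" strategy (the property name is assumed to be hibernate.search.indexing_strategy) leaves used=false and indexing is driven by explicit FullTextSession.index() calls. The entity passed in is assumed to be @Indexed.

import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class ManualIndexingSketch {

    // Assumes the configuration sets the indexing strategy to "manual"
    // (property assumed to be hibernate.search.indexing_strategy), which makes
    // initialize() above leave used=false so the event listener stays passive.
    public static void reindex(Session session, Object indexedEntity) {
        FullTextSession fullTextSession = Search.getFullTextSession( session );
        Transaction tx = fullTextSession.beginTransaction();
        // explicit indexing request, routed through FullTextSessionImpl.index()
        fullTextSession.index( indexedEntity );
        tx.commit(); // queued work is applied on transaction completion
    }
}
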
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/AndDocIdSet.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/AndDocIdSet.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/AndDocIdSet.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,136 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import static java.lang.Math.max;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.util.OpenBitSet;
+
+/**
+ * A DocIdSet built by applying an "AND" operation to a list of other DocIdSet(s).
+ * The returned DocIdSetIterator only returns document ids contained
+ * in all DocIdSet(s) handed to the constructor.
+ *
+ * @author Sanne Grinovero
+ */
+public class AndDocIdSet extends DocIdSet {
+
+ private DocIdSet docIdBitSet;
+ private final List<DocIdSet> andedDocIdSets;
+ private final int maxDocNumber;
+
+ public AndDocIdSet(List<DocIdSet> andedDocIdSets, int maxDocs) {
+ if ( andedDocIdSets == null || andedDocIdSets.size() < 2 )
+ throw new IllegalArgumentException( "To \"and\" DocIdSet(s) at least 2 of them are required" );
+ this.andedDocIdSets = new ArrayList<DocIdSet>( andedDocIdSets ); // make a defensive mutable copy
+ this.maxDocNumber = maxDocs;
+ }
+
+ private synchronized DocIdSet buildBitset() throws IOException {
+ if ( docIdBitSet != null ) return docIdBitSet; // check for concurrent initialization
+ //TODO if all andedDocIdSets are actually DocIdBitSet, use their internal BitSet instead of next algo.
+ //TODO if some andedDocIdSets are DocIdBitSet, merge them first.
+ int size = andedDocIdSets.size();
+ DocIdSetIterator[] iterators = new DocIdSetIterator[size];
+ for (int i=0; i<size; i++) {
+ // build all iterators
+ iterators[i] = andedDocIdSets.get(i).iterator();
+ }
+ andedDocIdSets.clear(); // contained DocIdSets are not needed any more, release them.
+ docIdBitSet = makeDocIdSetOnAgreedBits( iterators ); // before returning hold a copy as cache
+ return docIdBitSet;
+ }
+
+ private final DocIdSet makeDocIdSetOnAgreedBits(final DocIdSetIterator[] iterators) throws IOException {
+ final int iteratorSize = iterators.length;
+ int targetPosition = Integer.MIN_VALUE;
+ int votes = 0;
+ // Each iterator can vote "ok" for the current target to
+ // be reached; when all agree the bit is set.
+ // If an iterator disagrees (it jumped further), its current position becomes the new targetPosition
+ // for the others and it is considered "first" in the voting round (every iterator votes for itself ;-)
+ int i = 0;
+ //iterator initialize, just one "next" for each DocIdSetIterator
+ for ( ; i<iteratorSize; i++ ) {
+ final DocIdSetIterator iterator = iterators[i];
+ final int position = iterator.nextDoc();
+ if ( position==DocIdSetIterator.NO_MORE_DOCS ) {
+ //current iterator has no values, so skip all
+ return DocIdSet.EMPTY_DOCIDSET;
+ }
+ if ( targetPosition==position ) {
+ votes++; //stopped as same position of others
+ }
+ else {
+ targetPosition = max( targetPosition, position );
+ if (targetPosition==position) //means it changed
+ votes=1;
+ }
+ }
+ final OpenBitSet result = new OpenBitSet( maxDocNumber );
+ // end iterator initialize
+ if (votes==iteratorSize) {
+ result.fastSet( targetPosition );
+ targetPosition++;
+ }
+ i = 0;
+ votes = 0; //could be smarter but would make the code even more complex for a minor optimization outside the main loop.
+ // enter main loop:
+ while ( true ) {
+ final DocIdSetIterator iterator = iterators[i];
+ final int position = iterator.advance( targetPosition );
+ if ( position==DocIdSetIterator.NO_MORE_DOCS )
+ return result; //exit condition
+ if ( position == targetPosition ) {
+ if ( ++votes == iteratorSize ) {
+ result.fastSet( position );
+ votes = 0;
+ targetPosition++;
+ }
+ }
+ else {
+ votes = 1;
+ targetPosition = position;
+ }
+ i = ++i % iteratorSize;
+ }
+ }
+
+ @Override
+ public DocIdSetIterator iterator() throws IOException {
+ return buildBitset().iterator();
+ }
+
+ @Override
+ public boolean isCacheable() {
+ return true;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/AndDocIdSet.java
___________________________________________________________________
Name: svn:keywords
+ Id
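
To illustrate the voting algorithm above, a small self-contained sketch (test-style, not part of the commit) that ANDs two OpenBitSets; only documents 5 and 9 are present in both sets, so only those ids come back from the iterator.

import java.util.Arrays;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.OpenBitSet;
import org.hibernate.search.filter.AndDocIdSet;

public class AndDocIdSetSketch {

    public static void main(String[] args) throws Exception {
        OpenBitSet a = new OpenBitSet( 16 );
        a.set( 1 ); a.set( 5 ); a.set( 9 );
        OpenBitSet b = new OpenBitSet( 16 );
        b.set( 5 ); b.set( 9 ); b.set( 12 );

        // only documents present in every constituent set survive the AND
        DocIdSet and = new AndDocIdSet( Arrays.<DocIdSet>asList( a, b ), 16 );
        DocIdSetIterator it = and.iterator();
        for ( int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc() ) {
            System.out.println( doc ); // prints 5 and 9
        }
    }
}
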
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/CachingWrapperFilter.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/CachingWrapperFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/CachingWrapperFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,106 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.util.SoftLimitMRUCache;
+
+/**
+ * A slightly different version of Lucene's original <code>CachingWrapperFilter</code> which
+ * uses <code>SoftReferences</code> instead of <code>WeakReferences</code> in order to cache
+ * the filter <code>BitSet</code>.
+ *
+ * @author Hardy Ferentschik
+ * @see org.apache.lucene.search.CachingWrapperFilter
+ * @see <a href="http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-174">HSEARCH-174</a>
+ */
+@SuppressWarnings("serial")
+public class CachingWrapperFilter extends Filter {
+
+ private static final Logger log = LoggerFactory.make();
+
+ public static final int DEFAULT_SIZE = 5;
+
+ /**
+ * The cache using soft references in order to store the filter bit sets.
+ */
+ private final SoftLimitMRUCache cache;
+
+ private final Filter filter;
+
+ /**
+ * @param filter Filter to cache results of
+ */
+ public CachingWrapperFilter(Filter filter) {
+ this(filter, DEFAULT_SIZE);
+ }
+
+ /**
+ * @param filter Filter to cache results of
+ * @param size number of hard references to cached DocIdSets to keep
+ */
+ public CachingWrapperFilter(Filter filter, int size) {
+ this.filter = filter;
+ log.debug( "Initialising SoftLimitMRUCache with hard ref size of {}", size );
+ this.cache = new SoftLimitMRUCache( size );
+ }
+
+ @Override
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ DocIdSet cached = (DocIdSet) cache.get( reader );
+ if ( cached != null ) {
+ return cached;
+ }
+ synchronized (cache) {
+ cached = (DocIdSet) cache.get( reader );
+ if ( cached != null ) {
+ return cached;
+ }
+ final DocIdSet docIdSet = filter.getDocIdSet( reader );
+ cache.put( reader, docIdSet );
+ return docIdSet;
+ }
+ }
+
+ public String toString() {
+ return this.getClass().getName() + "(" + filter + ")";
+ }
+
+ public boolean equals(Object o) {
+ if (!(o instanceof CachingWrapperFilter))
+ return false;
+ return this.filter.equals(((CachingWrapperFilter) o).filter);
+ }
+
+ public int hashCode() {
+ return filter.hashCode() ^ 0x1117BF25;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/CachingWrapperFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
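
A brief usage sketch for the caching wrapper above; the wrapped QueryWrapperFilter and the "owner" field are illustrative assumptions. The wrapped filter's DocIdSet is computed once per IndexReader and then served from the SoftLimitMRUCache keyed on that reader.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.filter.CachingWrapperFilter;

public class CachingWrapperFilterSketch {

    // Wraps a potentially expensive filter so repeated queries against the same
    // IndexReader reuse the already computed DocIdSet.
    public static Filter ownerFilter() {
        Filter expensive = new QueryWrapperFilter( new TermQuery( new Term( "owner", "admin" ) ) );
        return new CachingWrapperFilter( expensive, 10 ); // keep 10 hard references
    }
}
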
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ChainedFilter.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ChainedFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ChainedFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,93 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.util.BitSet;
+import java.util.List;
+import java.util.ArrayList;
+import java.io.IOException;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * <p>A Filter capable of chaining other filters, so that it's
+ * possible to apply several filters on a Query.</p>
+ * <p>The resulting filter only enables result Documents
+ * that no chained filter removed.</p>
+ *
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class ChainedFilter extends Filter {
+
+ private static final long serialVersionUID = -6153052295766531920L;
+
+ private final List<Filter> chainedFilters = new ArrayList<Filter>();
+
+ public void addFilter(Filter filter) {
+ this.chainedFilters.add( filter );
+ }
+
+ public boolean isEmpty() {
+ return chainedFilters.size() == 0;
+ }
+
+ public BitSet bits(IndexReader reader) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ int size = chainedFilters.size();
+ if ( size == 0 ) {
+ throw new AssertionFailure( "ChainedFilter has no filters to chain" );
+ }
+ else if ( size == 1 ) {
+ return chainedFilters.get( 0 ).getDocIdSet( reader );
+ }
+ else {
+ List<DocIdSet> subSets = new ArrayList<DocIdSet>( size );
+ for ( Filter f : chainedFilters ) {
+ subSets.add( f.getDocIdSet( reader ) );
+ }
+ subSets = FilterOptimizationHelper.mergeByBitAnds( subSets );
+ if ( subSets.size() == 1 ) {
+ return subSets.get( 0 );
+ }
+ return new AndDocIdSet( subSets, reader.maxDoc() );
+ }
+ }
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder( "ChainedFilter [" );
+ for (Filter filter : chainedFilters) {
+ sb.append( "\n " ).append( filter.toString() );
+ }
+ return sb.append("\n]" ).toString();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ChainedFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
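
A short sketch of chaining two pre-built filters with the class above; the filters themselves are assumed to come from elsewhere. With more than one filter the result goes through FilterOptimizationHelper and, if needed, AndDocIdSet.

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.hibernate.search.filter.ChainedFilter;

public class ChainedFilterSketch {

    // Combines two independently defined filters; the resulting DocIdSet only
    // contains documents accepted by both (see getDocIdSet above).
    public static DocIdSet apply(IndexReader reader, Filter first, Filter second) throws IOException {
        ChainedFilter chained = new ChainedFilter();
        chained.addFilter( first );
        chained.addFilter( second );
        return chained.getDocIdSet( reader );
    }
}
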
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterCachingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterCachingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterCachingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.util.Properties;
+
+import org.apache.lucene.search.Filter;
+
+/**
+ * Defines the filter caching strategy.
+ * Implementations of getCachedFilter and addCachedFilter must be thread-safe.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FilterCachingStrategy {
+ /**
+ * Initialize the strategy from the properties.
+ * The Properties must not be changed.
+ */
+ void initialize(Properties properties);
+ /**
+ * Retrieve the cached filter for a given key or null if not cached
+ */
+ Filter getCachedFilter(FilterKey key);
+
+ /**
+ * Propose a candidate filter for caching
+ */
+ void addCachedFilter(FilterKey key, Filter filter);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterCachingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterKey.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterKey.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterKey.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+/**
+ * The key object must implement equals / hashCode so that 2 keys are equal if and only if
+ * the given Filter types are the same and the set of parameters is the same.
+ * <p/>
+ * The FilterKey creator (i.e. the @Key method) does not have to inject <code>impl</code>;
+ * it will be done by Hibernate Search.
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class FilterKey {
+ // FilterKey implementations do not have to be thread-safe as FilterCachingStrategy ensures
+ // a memory barrier between usages
+ //
+
+ private Class impl;
+
+ /**
+ * Represents the @FullTextFilterDef.impl class
+ */
+ public Class getImpl() {
+ return impl;
+ }
+
+ public void setImpl(Class impl) {
+ this.impl = impl;
+ }
+
+ public abstract int hashCode();
+
+ public abstract boolean equals(Object obj);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterKey.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterOptimizationHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterOptimizationHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterOptimizationHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,122 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.List;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.util.DocIdBitSet;
+import org.apache.lucene.util.OpenBitSet;
+
+/**
+ * Helper class to apply some common optimizations when
+ * several Filters are applied.
+ *
+ * @author Sanne Grinovero
+ */
+public class FilterOptimizationHelper {
+
+ /**
+ * Returns a new list of DocIdSet, applying binary AND
+ * on all DocIdSets implemented using a BitSet or OpenBitSet.
+ * @param docIdSets the DocIdSets to merge where possible
+ * @return the same list if no changes were done
+ */
+ public static List<DocIdSet> mergeByBitAnds(List<DocIdSet> docIdSets) {
+ int size = docIdSets.size();
+ List<OpenBitSet> openBitSets = new ArrayList<OpenBitSet>( size );
+ List<DocIdBitSet> docIdBitSets = new ArrayList<DocIdBitSet>( size );
+ List<DocIdSet> nonMergeAble = new ArrayList<DocIdSet>( size );
+ for (DocIdSet set : docIdSets) {
+ if (set instanceof OpenBitSet) {
+ openBitSets.add( (OpenBitSet) set );
+ }
+ else if (set instanceof DocIdBitSet) {
+ docIdBitSets.add( (DocIdBitSet) set );
+ }
+ else {
+ nonMergeAble.add( set );
+ }
+ }
+ if ( openBitSets.size() <= 1 && docIdBitSets.size() <= 1 ) {
+ //skip all work as no optimization is possible
+ return docIdSets;
+ }
+ if ( openBitSets.size() > 0 ) {
+ nonMergeAble.add( mergeByBitAndsForOpenBitSet( openBitSets ) );
+ }
+ if ( docIdBitSets.size() > 0 ) {
+ nonMergeAble.add( mergeByBitAndsForDocIdBitSet( docIdBitSets ) );
+ }
+ return nonMergeAble;
+ }
+
+ /**
+ * Merges all DocIdBitSet in a new DocIdBitSet using
+ * binary AND operations, which is usually more efficient
+ * than using an iterator.
+ * @param docIdBitSets
+ * @return a new DocIdBitSet, or the first element if only
+ * one element was found in the list.
+ */
+ private static DocIdBitSet mergeByBitAndsForDocIdBitSet(List<DocIdBitSet> docIdBitSets) {
+ int listSize = docIdBitSets.size();
+ if ( listSize == 1 ) {
+ return docIdBitSets.get( 0 );
+ }
+ //we need to copy the first BitSet because BitSet is modified by .logicalOp
+ BitSet result = (BitSet) docIdBitSets.get( 0 ).getBitSet().clone();
+ for ( int i=1; i<listSize; i++ ) {
+ BitSet bitSet = docIdBitSets.get( i ).getBitSet();
+ result.and( bitSet );
+ }
+ return new DocIdBitSet( result );
+ }
+
+ /**
+ * Merges all OpenBitSet in a new OpenBitSet using
+ * binary AND operations, which is usually more efficient
+ * than using an iterator.
+ * @param openBitSets
+ * @return a new OpenBitSet, or the first element if only
+ * one element was found in the list.
+ */
+ private static OpenBitSet mergeByBitAndsForOpenBitSet(List<OpenBitSet> openBitSets) {
+ int listSize = openBitSets.size();
+ if ( listSize == 1 ) {
+ return openBitSets.get( 0 );
+ }
+ //we need to copy the first OpenBitSet because it is modified by the logical operation
+ OpenBitSet result = (OpenBitSet) openBitSets.get( 0 ).clone();
+ for ( int i=1; i<listSize; i++ ) {
+ OpenBitSet openSet = openBitSets.get( i );
+ result.intersect( openSet );
+ }
+ return result;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FilterOptimizationHelper.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
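
A small sketch of the merge behaviour described above, mixing two OpenBitSets with one DocIdBitSet; the two OpenBitSets get AND-merged eagerly while the single DocIdBitSet is passed through unchanged, so the returned list shrinks from 3 to 2 entries. Bit positions are arbitrary example values.

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.util.DocIdBitSet;
import org.apache.lucene.util.OpenBitSet;
import org.hibernate.search.filter.FilterOptimizationHelper;

public class MergeByBitAndsSketch {

    public static void main(String[] args) {
        List<DocIdSet> sets = new ArrayList<DocIdSet>();
        OpenBitSet open1 = new OpenBitSet( 8 ); open1.set( 2 ); open1.set( 3 );
        OpenBitSet open2 = new OpenBitSet( 8 ); open2.set( 3 ); open2.set( 4 );
        BitSet plain = new BitSet( 8 ); plain.set( 3 ); plain.set( 5 );
        sets.add( open1 );
        sets.add( open2 );
        sets.add( new DocIdBitSet( plain ) );

        // open1 and open2 are merged into one OpenBitSet holding only bit 3;
        // the DocIdBitSet stays as-is, so 2 sets remain for AndDocIdSet to combine
        List<DocIdSet> merged = FilterOptimizationHelper.mergeByBitAnds( sets );
        System.out.println( merged.size() ); // prints 2
    }
}
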
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FullTextFilterImplementor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FullTextFilterImplementor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FullTextFilterImplementor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import org.hibernate.search.FullTextFilter;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface FullTextFilterImplementor extends FullTextFilter {
+ /**
+ * @return Returns the Filter name
+ */
+ String getName();
+
+ //TODO should we expose Map<String, Object> getParameters()
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/FullTextFilterImplementor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,58 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.util.Properties;
+
+import org.apache.lucene.search.Filter;
+import org.hibernate.search.Environment;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.util.SoftLimitMRUCache;
+
+/**
+ * Keeps the most recently used Filters in the cache.
+ * The cache is at least as big as <code>hibernate.search.filter.cache_strategy.size</code>.
+ * Above this limit, Filters are kept as soft references.
+ *
+ * @author Emmanuel Bernard
+ */
+public class MRUFilterCachingStrategy implements FilterCachingStrategy {
+ private static final int DEFAULT_SIZE = 128;
+ private SoftLimitMRUCache cache;
+ private static final String SIZE = Environment.FILTER_CACHING_STRATEGY + ".size";
+
+ public void initialize(Properties properties) {
+ int size = ConfigurationParseHelper.getIntValue( properties, SIZE, DEFAULT_SIZE );
+ cache = new SoftLimitMRUCache( size );
+ }
+
+ public Filter getCachedFilter(FilterKey key) {
+ return (Filter) cache.get( key );
+ }
+
+ public void addCachedFilter(FilterKey key, Filter filter) {
+ cache.put( key, filter );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/MRUFilterCachingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
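
A sketch of configuring and exercising the MRU strategy above outside of the usual Hibernate bootstrap; the literal property key is an assumption derived from Environment.FILTER_CACHING_STRATEGY + ".size" (assumed to resolve to hibernate.search.filter.cache_strategy.size), and the key/filter arguments are expected to come from a filter definition.

import java.util.Properties;
import org.apache.lucene.search.Filter;
import org.hibernate.search.filter.FilterKey;
import org.hibernate.search.filter.MRUFilterCachingStrategy;

public class FilterCacheSketch {

    public static Filter cacheAndLookup(FilterKey key, Filter filter) {
        Properties props = new Properties();
        // size key built from Environment.FILTER_CACHING_STRATEGY + ".size" in the class above
        props.setProperty( "hibernate.search.filter.cache_strategy.size", "256" );

        MRUFilterCachingStrategy strategy = new MRUFilterCachingStrategy();
        strategy.initialize( props ); // falls back to DEFAULT_SIZE (128) when the property is absent
        strategy.addCachedFilter( key, filter );
        return strategy.getCachedFilter( key ); // same instance while still reachable in the cache
    }
}
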
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ShardSensitiveOnlyFilter.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ShardSensitiveOnlyFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ShardSensitiveOnlyFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+/**
+ * When using this class in @FullTextFilterDef.impl, Hibernate Search
+ * considers the filter to only influence the sharding strategy.
+ *
+ * This filter is not applied to the results of the Lucene query.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ShardSensitiveOnlyFilter {
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/ShardSensitiveOnlyFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
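
A hypothetical mapping sketch showing where this marker interface plugs in: with impl set to ShardSensitiveOnlyFilter, enabling the named filter only steers shard selection and no Lucene filter is applied to the results. Entity name, filter name and the omitted id mapping are assumptions.

import org.hibernate.search.annotations.FullTextFilterDef;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.filter.ShardSensitiveOnlyFilter;

// Hypothetical entity: because impl is ShardSensitiveOnlyFilter, enabling the
// "customer" filter only drives the sharding strategy; no Lucene filter is
// applied to the query results.
@Indexed
@FullTextFilterDef(name = "customer", impl = ShardSensitiveOnlyFilter.class)
public class Order {
    // id and field mappings omitted for brevity
}

At query time the filter would be enabled via FullTextQuery.enableFullTextFilter( "customer" ); its parameters are then only visible to the configured sharding strategy.
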
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/StandardFilterKey.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/StandardFilterKey.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/StandardFilterKey.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.filter;
+
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * Implements a filter key using all injected parameters to compute
+ * equals and hashCode;
+ * the order in which the parameters are added is significant.
+ *
+ * @author Emmanuel Bernard
+ */
+public class StandardFilterKey extends FilterKey {
+ private final List parameters = new ArrayList();
+ private boolean implSet;
+
+
+ public void setImpl(Class impl) {
+ super.setImpl( impl );
+ //add impl once and only once
+ if (implSet) {
+ parameters.set( 0, impl );
+ }
+ else {
+ implSet = true;
+ parameters.add( 0, impl );
+ }
+ }
+
+ public void addParameter(Object value) {
+ parameters.add( value );
+ }
+ public int hashCode() {
+ int hash = 23;
+ for (Object param : parameters) {
+ hash = 31*hash + (param != null ? param.hashCode() : 0);
+ }
+ return hash;
+ }
+
+ public boolean equals(Object obj) {
+ if ( ! ( obj instanceof StandardFilterKey ) ) return false;
+ StandardFilterKey that = (StandardFilterKey) obj;
+ int size = parameters.size();
+ if ( size != that.parameters.size() ) return false;
+ for (int index = 0 ; index < size; index++) {
+ Object paramThis = parameters.get( index );
+ Object paramThat = that.parameters.get( index );
+ if (paramThis == null && paramThat != null) return false;
+ if (paramThis != null && ! paramThis.equals( paramThat ) ) return false;
+ }
+ return true;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/filter/StandardFilterKey.java
___________________________________________________________________
Name: svn:keywords
+ Id
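
A hypothetical filter factory sketch showing the intended use of StandardFilterKey: the @Key method mirrors the injected parameter so that equal parameter values produce equal keys, letting the FilterCachingStrategy reuse the Filter built by the @Factory method. The factory class, the "owner" parameter and the indexed field name are assumptions.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.annotations.Factory;
import org.hibernate.search.annotations.Key;
import org.hibernate.search.filter.FilterKey;
import org.hibernate.search.filter.StandardFilterKey;

// Hypothetical filter factory: Hibernate Search injects the "owner" parameter
// through the setter before calling the @Key and @Factory methods.
public class OwnerFilterFactory {

    private String owner;

    public void setOwner(String owner) {
        this.owner = owner;
    }

    @Key
    public FilterKey getKey() {
        StandardFilterKey key = new StandardFilterKey();
        key.addParameter( owner ); // parameter order is significant for equals/hashCode
        return key;
    }

    @Factory
    public Filter getFilter() {
        return new QueryWrapperFilter( new TermQuery( new Term( "owner", owner ) ) );
    }
}
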
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/FullTextSessionImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/FullTextSessionImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,808 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.io.Serializable;
+import java.sql.Connection;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.hibernate.CacheMode;
+import org.hibernate.Criteria;
+import org.hibernate.EntityMode;
+import org.hibernate.Filter;
+import org.hibernate.FlushMode;
+import org.hibernate.Hibernate;
+import org.hibernate.HibernateException;
+import org.hibernate.Interceptor;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ReplicationMode;
+import org.hibernate.SQLQuery;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.UnknownProfileException;
+import org.hibernate.LockOptions;
+import org.hibernate.classic.Session;
+import org.hibernate.collection.PersistentCollection;
+import org.hibernate.engine.EntityKey;
+import org.hibernate.engine.PersistenceContext;
+import org.hibernate.engine.QueryParameters;
+import org.hibernate.engine.SessionFactoryImplementor;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.LoadQueryInfluencers;
+import org.hibernate.engine.NonFlushedChanges;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.engine.query.sql.NativeSQLQuerySpecification;
+import org.hibernate.event.EventListeners;
+import org.hibernate.event.EventSource;
+import org.hibernate.impl.CriteriaImpl;
+import org.hibernate.jdbc.Batcher;
+import org.hibernate.jdbc.JDBCContext;
+import org.hibernate.loader.custom.CustomQuery;
+import org.hibernate.persister.entity.EntityPersister;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.MassIndexer;
+import org.hibernate.search.backend.TransactionContext;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.impl.EventSourceTransactionContext;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.query.FullTextQueryImpl;
+import org.hibernate.search.util.ContextHelper;
+import org.hibernate.stat.SessionStatistics;
+import org.hibernate.type.Type;
+
+/**
+ * Lucene full text search aware session.
+ *
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ * @author Hardy Ferentschik
+ */
+@SuppressWarnings("deprecation")
+public class FullTextSessionImpl implements FullTextSession, SessionImplementor {
+
+ private final Session session;
+ private final SessionImplementor sessionImplementor;
+ private transient SearchFactoryImplementor searchFactory;
+ private final TransactionContext transactionContext;
+
+
+ public FullTextSessionImpl(org.hibernate.Session session) {
+ this.session = ( Session ) session;
+ this.transactionContext = new EventSourceTransactionContext( ( EventSource ) session );
+ this.sessionImplementor = ( SessionImplementor ) session;
+ }
+
+ /**
+ * Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses).
+ * If entities is empty, all indexed entities are included.
+ *
+ * @param entities must be immutable for the lifetime of the query object
+ */
+ public FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class... entities) {
+ return new FullTextQueryImpl( luceneQuery, entities, sessionImplementor, new ParameterMetadata( null, null ) );
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public <T> void purgeAll(Class<T> entityType) {
+ purge( entityType, null );
+ }
+
+ public void flushToIndexes() {
+ SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
+ searchFactoryImplementor.getWorker().flushWorks( transactionContext );
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public <T> void purge(Class<T> entityType, Serializable id) {
+ if ( entityType == null ) {
+ return;
+ }
+
+ Set<Class<?>> targetedClasses = getSearchFactoryImplementor().getIndexedTypesPolymorphic(
+ new Class[] {
+ entityType
+ }
+ );
+ if ( targetedClasses.isEmpty() ) {
+ String msg = entityType.getName() + " is not an indexed entity or a subclass of an indexed entity";
+ throw new IllegalArgumentException( msg );
+ }
+
+ for ( Class<?> clazz : targetedClasses ) {
+ if ( id == null ) {
+ createAndPerformWork( clazz, null, WorkType.PURGE_ALL );
+ }
+ else {
+ createAndPerformWork( clazz, id, WorkType.PURGE );
+ }
+ }
+ }
+
+ private <T> void createAndPerformWork(Class<T> clazz, Serializable id, WorkType workType) {
+ Work<T> work;
+ work = new Work<T>( clazz, id, workType );
+ getSearchFactoryImplementor().getWorker().performWork( work, transactionContext );
+ }
+
+ /**
+ * (Re-)index an entity.
+ * The entity must be associated with the session, and non-indexable entities are ignored.
+ *
+ * @param entity The entity to index - must not be <code>null</code>.
+ *
+ * @throws IllegalArgumentException if entity is null or not an @Indexed entity
+ */
+ public <T> void index(T entity) {
+ if ( entity == null ) {
+ throw new IllegalArgumentException( "Entity to index should not be null" );
+ }
+
+ Class<?> clazz = Hibernate.getClass( entity );
+ //TODO cache that at the FTSession level
+ SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
+ //not strictly necessary but a small optimization
+ if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz ) == null ) {
+ String msg = "Entity to index is not an @Indexed entity: " + entity.getClass().getName();
+ throw new IllegalArgumentException( msg );
+ }
+ Serializable id = session.getIdentifier( entity );
+ Work<T> work = new Work<T>( entity, id, WorkType.INDEX );
+ searchFactoryImplementor.getWorker().performWork( work, transactionContext );
+
+ //TODO
+ //need to add elements in a queue kept at the Session level
+ //the queue will be processed by a Lucene(Auto)FlushEventListener
+ //note that we could keep this queue somewhere in the event listener in the mean time but that requires
+ //a synchronized hashmap holding this queue on a per session basis plus some session house keeping (yuk)
+ //another solution would be to subclass SessionImpl instead of having this LuceneSession delegation model
+ //this is an open discussion
+ }
+
+ public MassIndexer createIndexer(Class<?>... types) {
+ if ( types.length == 0 ) {
+ return new MassIndexerImpl( getSearchFactoryImplementor(), getSessionFactory(), Object.class );
+ }
+ else {
+ return new MassIndexerImpl( getSearchFactoryImplementor(), getSessionFactory(), types );
+ }
+ }
+
+ public SearchFactory getSearchFactory() {
+ return getSearchFactoryImplementor();
+ }
+
+ private SearchFactoryImplementor getSearchFactoryImplementor() {
+ if ( searchFactory == null ) {
+ searchFactory = ContextHelper.getSearchFactory( session );
+ }
+ return searchFactory;
+ }
+
+ public Query createSQLQuery(String sql, String returnAlias, Class returnClass) {
+ return session.createSQLQuery( sql, returnAlias, returnClass );
+ }
+
+ public Query createSQLQuery(String sql, String[] returnAliases, Class[] returnClasses) {
+ return session.createSQLQuery( sql, returnAliases, returnClasses );
+ }
+
+ public int delete(String query) throws HibernateException {
+ return session.delete( query );
+ }
+
+ public int delete(String query, Object value, Type type) throws HibernateException {
+ return session.delete( query, value, type );
+ }
+
+ public int delete(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.delete( query, values, types );
+ }
+
+ public Collection filter(Object collection, String filter) throws HibernateException {
+ return session.filter( collection, filter );
+ }
+
+ public Collection filter(Object collection, String filter, Object value, Type type) throws HibernateException {
+ return session.filter( collection, filter, value, type );
+ }
+
+ public Collection filter(Object collection, String filter, Object[] values, Type[] types)
+ throws HibernateException {
+ return session.filter( collection, filter, values, types );
+ }
+
+ public List find(String query) throws HibernateException {
+ return session.find( query );
+ }
+
+ public List find(String query, Object value, Type type) throws HibernateException {
+ return session.find( query, value, type );
+ }
+
+ public List find(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.find( query, values, types );
+ }
+
+ public Iterator iterate(String query) throws HibernateException {
+ return session.iterate( query );
+ }
+
+ public Iterator iterate(String query, Object value, Type type) throws HibernateException {
+ return session.iterate( query, value, type );
+ }
+
+ public Iterator iterate(String query, Object[] values, Type[] types) throws HibernateException {
+ return session.iterate( query, values, types );
+ }
+
+ public void save(String entityName, Object object, Serializable id) throws HibernateException {
+ session.save( entityName, object, id );
+ }
+
+ public void save(Object object, Serializable id) throws HibernateException {
+ session.save( object, id );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object );
+ }
+
+ public Object saveOrUpdateCopy(String entityName, Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( entityName, object, id );
+ }
+
+ public Object saveOrUpdateCopy(Object object) throws HibernateException {
+ return session.saveOrUpdateCopy( object );
+ }
+
+ public Object saveOrUpdateCopy(Object object, Serializable id) throws HibernateException {
+ return session.saveOrUpdateCopy( object, id );
+ }
+
+ public void update(String entityName, Object object, Serializable id) throws HibernateException {
+ session.update( entityName, object, id );
+ }
+
+ public void update(Object object, Serializable id) throws HibernateException {
+ session.update( object, id );
+ }
+
+ public Transaction beginTransaction() throws HibernateException {
+ return session.beginTransaction();
+ }
+
+ public void cancelQuery() throws HibernateException {
+ session.cancelQuery();
+ }
+
+ public void clear() {
+ //FIXME should session clear work with the lucene queue
+ session.clear();
+ }
+
+ public Connection close() throws HibernateException {
+ return session.close();
+ }
+
+ public Connection connection() throws HibernateException {
+ return session.connection();
+ }
+
+ public boolean contains(Object object) {
+ return session.contains( object );
+ }
+
+ public Criteria createCriteria(String entityName) {
+ return session.createCriteria( entityName );
+ }
+
+ public Criteria createCriteria(String entityName, String alias) {
+ return session.createCriteria( entityName, alias );
+ }
+
+ public Criteria createCriteria(Class persistentClass) {
+ return session.createCriteria( persistentClass );
+ }
+
+ public Criteria createCriteria(Class persistentClass, String alias) {
+ return session.createCriteria( persistentClass, alias );
+ }
+
+ public Query createFilter(Object collection, String queryString) throws HibernateException {
+ return session.createFilter( collection, queryString );
+ }
+
+ public Query createQuery(String queryString) throws HibernateException {
+ return session.createQuery( queryString );
+ }
+
+ public SQLQuery createSQLQuery(String queryString) throws HibernateException {
+ return session.createSQLQuery( queryString );
+ }
+
+ public void delete(String entityName, Object object) throws HibernateException {
+ session.delete( entityName, object );
+ }
+
+ public void delete(Object object) throws HibernateException {
+ session.delete( object );
+ }
+
+ public void disableFilter(String filterName) {
+ session.disableFilter( filterName );
+ }
+
+ public Connection disconnect() throws HibernateException {
+ return session.disconnect();
+ }
+
+ public Filter enableFilter(String filterName) {
+ return session.enableFilter( filterName );
+ }
+
+ public void evict(Object object) throws HibernateException {
+ session.evict( object );
+ }
+
+ public void flush() throws HibernateException {
+ session.flush();
+ }
+
+ public Object get(Class clazz, Serializable id) throws HibernateException {
+ return session.get( clazz, id );
+ }
+
+ public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( clazz, id, lockMode );
+ }
+
+ public Object get(Class clazz, Serializable id, LockOptions lockOptions) throws HibernateException {
+ return session.get( clazz, id, lockOptions );
+ }
+
+ public Object get(String entityName, Serializable id) throws HibernateException {
+ return session.get( entityName, id );
+ }
+
+ public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.get( entityName, id, lockMode );
+ }
+
+ public Object get(String entityName, Serializable id, LockOptions lockOptions) throws HibernateException {
+ return session.get( entityName, id, lockOptions );
+ }
+
+ public CacheMode getCacheMode() {
+ return session.getCacheMode();
+ }
+
+ public LockMode getCurrentLockMode(Object object) throws HibernateException {
+ return session.getCurrentLockMode( object );
+ }
+
+ public Filter getEnabledFilter(String filterName) {
+ return session.getEnabledFilter( filterName );
+ }
+
+ public Interceptor getInterceptor() {
+ return sessionImplementor.getInterceptor();
+ }
+
+ public void setAutoClear(boolean enabled) {
+ sessionImplementor.setAutoClear( enabled );
+ }
+
+ public boolean isTransactionInProgress() {
+ return sessionImplementor.isTransactionInProgress();
+ }
+
+ public void initializeCollection(PersistentCollection collection, boolean writing) throws HibernateException {
+ sessionImplementor.initializeCollection( collection, writing );
+ }
+
+ public Object internalLoad(String entityName, Serializable id, boolean eager, boolean nullable)
+ throws HibernateException {
+ return sessionImplementor.internalLoad( entityName, id, eager, nullable );
+ }
+
+ public Object immediateLoad(String entityName, Serializable id) throws HibernateException {
+ return sessionImplementor.immediateLoad( entityName, id );
+ }
+
+ public long getTimestamp() {
+ return sessionImplementor.getTimestamp();
+ }
+
+ public SessionFactoryImplementor getFactory() {
+ return sessionImplementor.getFactory();
+ }
+
+ public Batcher getBatcher() {
+ return sessionImplementor.getBatcher();
+ }
+
+ public List list(String query, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.list( query, queryParameters );
+ }
+
+ public Iterator iterate(String query, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.iterate( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(String query, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.scroll( query, queryParameters );
+ }
+
+ public ScrollableResults scroll(CriteriaImpl criteria, ScrollMode scrollMode) {
+ return sessionImplementor.scroll( criteria, scrollMode );
+ }
+
+ public List list(CriteriaImpl criteria) {
+ return sessionImplementor.list( criteria );
+ }
+
+ public List listFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return sessionImplementor.listFilter( collection, filter, queryParameters );
+ }
+
+ public Iterator iterateFilter(Object collection, String filter, QueryParameters queryParameters)
+ throws HibernateException {
+ return sessionImplementor.iterateFilter( collection, filter, queryParameters );
+ }
+
+ public EntityPersister getEntityPersister(String entityName, Object object) throws HibernateException {
+ return sessionImplementor.getEntityPersister( entityName, object );
+ }
+
+ public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException {
+ return sessionImplementor.getEntityUsingInterceptor( key );
+ }
+
+ public void afterTransactionCompletion(boolean successful, Transaction tx) {
+ sessionImplementor.afterTransactionCompletion( successful, tx );
+ }
+
+ public void beforeTransactionCompletion(Transaction tx) {
+ sessionImplementor.beforeTransactionCompletion( tx );
+ }
+
+ public Serializable getContextEntityIdentifier(Object object) {
+ return sessionImplementor.getContextEntityIdentifier( object );
+ }
+
+ public String bestGuessEntityName(Object object) {
+ return sessionImplementor.bestGuessEntityName( object );
+ }
+
+ public String guessEntityName(Object entity) throws HibernateException {
+ return sessionImplementor.guessEntityName( entity );
+ }
+
+ public Object instantiate(String entityName, Serializable id) throws HibernateException {
+ return sessionImplementor.instantiate( entityName, id );
+ }
+
+ public List listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.listCustomQuery( customQuery, queryParameters );
+ }
+
+ public ScrollableResults scrollCustomQuery(CustomQuery customQuery, QueryParameters queryParameters)
+ throws HibernateException {
+ return sessionImplementor.scrollCustomQuery( customQuery, queryParameters );
+ }
+
+ public List list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.list( spec, queryParameters );
+ }
+
+ public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters)
+ throws HibernateException {
+ return sessionImplementor.scroll( spec, queryParameters );
+ }
+
+ public Object getFilterParameterValue(String filterParameterName) {
+ return sessionImplementor.getFilterParameterValue( filterParameterName );
+ }
+
+ public Type getFilterParameterType(String filterParameterName) {
+ return sessionImplementor.getFilterParameterType( filterParameterName );
+ }
+
+ public Map getEnabledFilters() {
+ return sessionImplementor.getEnabledFilters();
+ }
+
+ public int getDontFlushFromFind() {
+ return sessionImplementor.getDontFlushFromFind();
+ }
+
+ public EventListeners getListeners() {
+ return sessionImplementor.getListeners();
+ }
+
+ public PersistenceContext getPersistenceContext() {
+ return sessionImplementor.getPersistenceContext();
+ }
+
+ public int executeUpdate(String query, QueryParameters queryParameters) throws HibernateException {
+ return sessionImplementor.executeUpdate( query, queryParameters );
+ }
+
+ public int executeNativeUpdate(NativeSQLQuerySpecification specification, QueryParameters queryParameters)
+ throws HibernateException {
+ return sessionImplementor.executeNativeUpdate( specification, queryParameters );
+ }
+
+ public NonFlushedChanges getNonFlushedChanges() throws HibernateException {
+ return sessionImplementor.getNonFlushedChanges();
+ }
+
+ public void applyNonFlushedChanges(NonFlushedChanges nonFlushedChanges) throws HibernateException {
+ sessionImplementor.applyNonFlushedChanges( nonFlushedChanges );
+ }
+
+ public EntityMode getEntityMode() {
+ return session.getEntityMode();
+ }
+
+ public String getEntityName(Object object) throws HibernateException {
+ return session.getEntityName( object );
+ }
+
+ public FlushMode getFlushMode() {
+ return session.getFlushMode();
+ }
+
+ public Serializable getIdentifier(Object object) throws HibernateException {
+ return session.getIdentifier( object );
+ }
+
+ public Query getNamedQuery(String queryName) throws HibernateException {
+ return session.getNamedQuery( queryName );
+ }
+
+ public Query getNamedSQLQuery(String name) {
+ return sessionImplementor.getNamedSQLQuery( name );
+ }
+
+ public boolean isEventSource() {
+ return sessionImplementor.isEventSource();
+ }
+
+ public void afterScrollOperation() {
+ sessionImplementor.afterScrollOperation();
+ }
+
+ public void setFetchProfile(String name) {
+ sessionImplementor.setFetchProfile( name );
+ }
+
+ public String getFetchProfile() {
+ return sessionImplementor.getFetchProfile();
+ }
+
+ public JDBCContext getJDBCContext() {
+ return sessionImplementor.getJDBCContext();
+ }
+
+ public boolean isClosed() {
+ return sessionImplementor.isClosed();
+ }
+
+ public LoadQueryInfluencers getLoadQueryInfluencers() {
+ return sessionImplementor.getLoadQueryInfluencers();
+ }
+
+ public org.hibernate.Session getSession(EntityMode entityMode) {
+ return session.getSession( entityMode );
+ }
+
+ public SessionFactory getSessionFactory() {
+ return session.getSessionFactory();
+ }
+
+ public SessionStatistics getStatistics() {
+ return session.getStatistics();
+ }
+
+ public boolean isReadOnly(Object entityOrProxy) {
+ return session.isReadOnly( entityOrProxy );
+ }
+
+ public Transaction getTransaction() {
+ return session.getTransaction();
+ }
+
+ public boolean isConnected() {
+ return session.isConnected();
+ }
+
+ public boolean isDirty() throws HibernateException {
+ return session.isDirty();
+ }
+
+ public boolean isDefaultReadOnly() {
+ return session.isDefaultReadOnly();
+ }
+
+ public boolean isOpen() {
+ return session.isOpen();
+ }
+
+ public Object load(String entityName, Serializable id) throws HibernateException {
+ return session.load( entityName, id );
+ }
+
+ public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( entityName, id, lockMode );
+ }
+
+ public Object load(String entityName, Serializable id, LockOptions lockOptions) throws HibernateException {
+ return session.load( entityName, id, lockOptions );
+ }
+
+ public void load(Object object, Serializable id) throws HibernateException {
+ session.load( object, id );
+ }
+
+ public Object load(Class theClass, Serializable id) throws HibernateException {
+ return session.load( theClass, id );
+ }
+
+ public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException {
+ return session.load( theClass, id, lockMode );
+ }
+
+ public Object load(Class theClass, Serializable id, LockOptions lockOptions) throws HibernateException {
+ return session.load( theClass, id, lockOptions );
+ }
+
+ public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException {
+ session.lock( entityName, object, lockMode );
+ }
+
+ public LockRequest buildLockRequest(LockOptions lockOptions) {
+ return session.buildLockRequest( lockOptions );
+ }
+
+ public void lock(Object object, LockMode lockMode) throws HibernateException {
+ session.lock( object, lockMode );
+ }
+
+ public Object merge(String entityName, Object object) throws HibernateException {
+ return session.merge( entityName, object );
+ }
+
+ public Object merge(Object object) throws HibernateException {
+ return session.merge( object );
+ }
+
+ public void persist(String entityName, Object object) throws HibernateException {
+ session.persist( entityName, object );
+ }
+
+ public void persist(Object object) throws HibernateException {
+ session.persist( object );
+ }
+
+ public void reconnect() throws HibernateException {
+ session.reconnect();
+ }
+
+ public void reconnect(Connection connection) throws HibernateException {
+ session.reconnect( connection );
+ }
+
+ public void refresh(Object object) throws HibernateException {
+ session.refresh( object );
+ }
+
+ public void refresh(Object object, LockMode lockMode) throws HibernateException {
+ session.refresh( object, lockMode );
+ }
+
+ public void refresh(Object object, LockOptions lockOptions) throws HibernateException {
+ session.refresh( object, lockOptions );
+ }
+
+ public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( entityName, object, replicationMode );
+ }
+
+ public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException {
+ session.replicate( object, replicationMode );
+ }
+
+ public Serializable save(String entityName, Object object) throws HibernateException {
+ return session.save( entityName, object );
+ }
+
+ public Serializable save(Object object) throws HibernateException {
+ return session.save( object );
+ }
+
+ public void saveOrUpdate(String entityName, Object object) throws HibernateException {
+ session.saveOrUpdate( entityName, object );
+ }
+
+ public void saveOrUpdate(Object object) throws HibernateException {
+ session.saveOrUpdate( object );
+ }
+
+ public void setCacheMode(CacheMode cacheMode) {
+ session.setCacheMode( cacheMode );
+ }
+
+ public void setDefaultReadOnly(boolean readOnly) {
+ session.setDefaultReadOnly( readOnly );
+ }
+
+ public void setFlushMode(FlushMode flushMode) {
+ session.setFlushMode( flushMode );
+ }
+
+ public void setReadOnly(Object entity, boolean readOnly) {
+ session.setReadOnly( entity, readOnly );
+ }
+
+ public void doWork(org.hibernate.jdbc.Work work) throws HibernateException {
+ session.doWork( work );
+ }
+
+ public void update(String entityName, Object object) throws HibernateException {
+ session.update( entityName, object );
+ }
+
+ public void update(Object object) throws HibernateException {
+ session.update( object );
+ }
+
+ public boolean isFetchProfileEnabled(String name) throws UnknownProfileException {
+ return session.isFetchProfileEnabled( name );
+ }
+
+ public void enableFetchProfile(String name) throws UnknownProfileException {
+ session.enableFetchProfile( name );
+ }
+
+ public void disableFetchProfile(String name) throws UnknownProfileException {
+ session.disableFetchProfile( name );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/FullTextSessionImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
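
For illustration only (not part of the commit): a minimal usage sketch of the full-text session API implemented above, assuming org.hibernate.search.Search.getFullTextSession(session), Lucene's TermQuery/Term, and a hypothetical @Indexed Book entity with instances aBook/bookId already known to the session.

    FullTextSession fullTextSession = Search.getFullTextSession( session );
    Transaction tx = fullTextSession.beginTransaction();

    // wrap a plain Lucene query into a Hibernate query targeting Book
    org.apache.lucene.search.Query luceneQuery =
            new TermQuery( new Term( "title", "hibernate" ) );
    List results = fullTextSession.createFullTextQuery( luceneQuery, Book.class ).list();

    // manual index maintenance
    fullTextSession.index( aBook );               // throws IllegalArgumentException if Book is not @Indexed
    fullTextSession.purge( Book.class, bookId );  // a null id purges all Book documents
    fullTextSession.flushToIndexes();             // apply pending index work before commit

    tx.commit();
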
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/InitContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/InitContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/InitContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,201 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.search.Similarity;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.util.DelegateNamedAnalyzer;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.PluginLoader;
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.util.StringHelper;
+import org.slf4j.Logger;
+
+/**
+ * Provides access to some default configuration settings (e.g. default <code>Analyzer</code> or default
+ * <code>Similarity</code>) and checks whether certain optional libraries are available.
+ *
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class InitContext {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final Map<String, AnalyzerDef> analyzerDefs = new HashMap<String, AnalyzerDef>();
+ private final List<DelegateNamedAnalyzer> lazyAnalyzers = new ArrayList<DelegateNamedAnalyzer>();
+ private final Analyzer defaultAnalyzer;
+ private final Similarity defaultSimilarity;
+ private final boolean solrPresent;
+ private final boolean jpaPresent;
+
+ public InitContext(SearchConfiguration cfg) {
+ defaultAnalyzer = initAnalyzer(cfg);
+ defaultSimilarity = initSimilarity(cfg);
+ solrPresent = isPresent( "org.apache.solr.analysis.TokenizerFactory" );
+ jpaPresent = isPresent( "javax.persistence.Id" );
+ }
+
+ public void addAnalyzerDef(AnalyzerDef ann) {
+ //FIXME somehow remember where the analyzerDef comes from and raise an exception if analyzerDefs
+ //with the same name are added from two different places
+ //adding multiple times from the same place is required to deal with inheritance hierarchies processed multiple times
+ if ( ann != null && analyzerDefs.put( ann.name(), ann ) != null ) {
+ //throw new SearchException("Multiple AnalyzerDef with the same name: " + name);
+ }
+ }
+
+ public Analyzer buildLazyAnalyzer(String name) {
+ final DelegateNamedAnalyzer delegateNamedAnalyzer = new DelegateNamedAnalyzer( name );
+ lazyAnalyzers.add(delegateNamedAnalyzer);
+ return delegateNamedAnalyzer;
+ }
+
+ public List<DelegateNamedAnalyzer> getLazyAnalyzers() {
+ return lazyAnalyzers;
+ }
+
+ /**
+ * Initializes the Lucene analyzer to use by reading the analyzer class from the configuration and instantiating it.
+ *
+ * @param cfg
+ * The current configuration.
+ * @return The Lucene analyzer to use for tokenisation.
+ */
+ private Analyzer initAnalyzer(SearchConfiguration cfg) {
+ Class analyzerClass;
+ String analyzerClassName = cfg.getProperty( Environment.ANALYZER_CLASS );
+ if ( analyzerClassName != null ) {
+ try {
+ analyzerClass = ReflectHelper.classForName( analyzerClassName );
+ } catch (Exception e) {
+ return buildLazyAnalyzer( analyzerClassName );
+ }
+ } else {
+ analyzerClass = StandardAnalyzer.class;
+ }
+ Analyzer defaultAnalyzer = PluginLoader.instanceFromClass( Analyzer.class,
+ analyzerClass, "Lucene analyzer" );
+ return defaultAnalyzer;
+ }
+
+ /**
+ * Initializes the Lucene similarity to use.
+ *
+ * @param cfg the search configuration.
+ * @return returns the default similarity class.
+ */
+ private Similarity initSimilarity(SearchConfiguration cfg) {
+ String similarityClassName = cfg.getProperty(Environment.SIMILARITY_CLASS);
+ Similarity defaultSimilarity;
+ if ( StringHelper.isEmpty( similarityClassName ) ) {
+ defaultSimilarity = Similarity.getDefault();
+ }
+ else {
+ defaultSimilarity = PluginLoader.instanceFromName(
+ Similarity.class, similarityClassName, InitContext.class, "default similarity" );
+ }
+ log.debug( "Using default similarity implementation: {}", defaultSimilarity.getClass().getName() );
+ return defaultSimilarity;
+ }
+
+ public Analyzer getDefaultAnalyzer() {
+ return defaultAnalyzer;
+ }
+
+ public Similarity getDefaultSimilarity() {
+ return defaultSimilarity;
+ }
+
+ public Map<String, Analyzer> initLazyAnalyzers() {
+ Map<String, Analyzer> initializedAnalyzers = new HashMap<String, Analyzer>( analyzerDefs.size() );
+
+ for (DelegateNamedAnalyzer namedAnalyzer : lazyAnalyzers) {
+ String name = namedAnalyzer.getName();
+ if ( initializedAnalyzers.containsKey( name ) ) {
+ namedAnalyzer.setDelegate( initializedAnalyzers.get( name ) );
+ }
+ else {
+ if ( analyzerDefs.containsKey( name ) ) {
+ final Analyzer analyzer = buildAnalyzer( analyzerDefs.get( name ) );
+ namedAnalyzer.setDelegate( analyzer );
+ initializedAnalyzers.put( name, analyzer );
+ }
+ else {
+ throw new SearchException("Analyzer found with an unknown definition: " + name);
+ }
+ }
+ }
+
+ //initialize the remaining definitions
+ for ( Map.Entry<String, AnalyzerDef> entry : analyzerDefs.entrySet() ) {
+ if ( ! initializedAnalyzers.containsKey( entry.getKey() ) ) {
+ final Analyzer analyzer = buildAnalyzer( entry.getValue() );
+ initializedAnalyzers.put( entry.getKey(), analyzer );
+ }
+ }
+ return Collections.unmodifiableMap( initializedAnalyzers );
+ }
+
+ private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
+ if ( ! solrPresent ) {
+ throw new SearchException( "Use of @AnalyzerDef while Solr is not present in the classpath. Add apache-solr-analyzer.jar" );
+ }
+ // SolrAnalyzerBuilder references Solr classes.
+ // InitContext should not (directly or indirectly) load a Solr class to avoid hard dependency
+ // unless necessary
+ // the current mechanism (check Solr class presence and call SolrAnalyzerBuilder only if needed)
+ // seems to be sufficient on the Apple VM (derived from Sun's)
+ // TODO check on other VMs and be ready for a more reflective approach
+ return SolrAnalyzerBuilder.buildAnalyzer( analyzerDef );
+ }
+
+ public boolean isJpaPresent() {
+ return jpaPresent;
+ }
+
+ private boolean isPresent(String classname) {
+ try {
+ ReflectHelper.classForName( classname, InitContext.class );
+ return true;
+ }
+ catch ( Exception e ) {
+ return false;
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/InitContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
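
For illustration only (not part of the commit): the default Analyzer and Similarity that InitContext resolves can be overridden through configuration properties. A minimal sketch, assuming the keys behind Environment.ANALYZER_CLASS and Environment.SIMILARITY_CLASS are "hibernate.search.analyzer" and "hibernate.search.similarity" (verify against the Environment class for this version); the Lucene classes used are examples with no-arg constructors.

    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    // any Analyzer/Similarity implementation instantiable by PluginLoader can be configured here
    cfg.setProperty( "hibernate.search.analyzer",
            org.apache.lucene.analysis.SimpleAnalyzer.class.getName() );
    cfg.setProperty( "hibernate.search.similarity",
            org.apache.lucene.search.DefaultSimilarity.class.getName() );
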
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MappingModelMetadataProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MappingModelMetadataProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MappingModelMetadataProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,596 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.beans.Introspector;
+import java.lang.annotation.Annotation;
+import java.lang.annotation.ElementType;
+import java.lang.reflect.AnnotatedElement;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+
+import org.hibernate.annotations.common.annotationfactory.AnnotationDescriptor;
+import org.hibernate.annotations.common.annotationfactory.AnnotationFactory;
+import org.hibernate.annotations.common.reflection.AnnotationReader;
+import org.hibernate.annotations.common.reflection.Filter;
+import org.hibernate.annotations.common.reflection.MetadataProvider;
+import org.hibernate.annotations.common.reflection.ReflectionUtil;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.AnalyzerDiscriminator;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.CalendarBridge;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.ClassBridges;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.DynamicBoost;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.search.annotations.Fields;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.FullTextFilterDefs;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.ProvidedId;
+import org.hibernate.search.annotations.Similarity;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
+import org.hibernate.search.cfg.EntityDescriptor;
+import org.hibernate.search.cfg.PropertyDescriptor;
+import org.hibernate.search.cfg.SearchMapping;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MappingModelMetadataProvider implements MetadataProvider {
+
+ private static final Filter FILTER = new Filter() {
+ public boolean returnStatic() {
+ return false;
+ }
+
+ public boolean returnTransient() {
+ return true;
+ }
+ };
+
+ private final MetadataProvider delegate;
+ private final SearchMapping mapping;
+ private final Map<AnnotatedElement, AnnotationReader> cache = new HashMap<AnnotatedElement, AnnotationReader>(100);
+ private Map<Object, Object> defaults;
+
+ public MappingModelMetadataProvider(MetadataProvider delegate, SearchMapping mapping) {
+ this.delegate = delegate;
+ this.mapping = mapping;
+ }
+
+ public Map<Object, Object> getDefaults() {
+ if (defaults == null) {
+ final Map<Object, Object> delegateDefaults = delegate.getDefaults();
+ defaults = delegateDefaults == null ?
+ new HashMap<Object, Object>() :
+ new HashMap<Object, Object>(delegateDefaults);
+ defaults.put( AnalyzerDefs.class, createAnalyzerDefArray() );
+ if (!mapping.getFullTextFilerDefs().isEmpty()) {
+ defaults.put(FullTextFilterDefs.class, createFullTextFilterDefsForMapping());
+ }
+ }
+ return defaults;
+ }
+
+
+
+ public AnnotationReader getAnnotationReader(AnnotatedElement annotatedElement) {
+ AnnotationReader reader = cache.get(annotatedElement);
+ if (reader == null) {
+ reader = new MappingModelAnnotationReader( mapping, delegate, annotatedElement);
+ cache.put( annotatedElement, reader );
+ }
+ return reader;
+ }
+
+ private AnalyzerDef[] createAnalyzerDefArray() {
+ AnalyzerDef[] defs = new AnalyzerDef[ mapping.getAnalyzerDefs().size() ];
+ int index = 0;
+ for ( Map<String, Object> analyzerDef : mapping.getAnalyzerDefs() ) {
+ defs[index] = createAnalyzerDef( analyzerDef );
+ index++;
+ }
+ return defs;
+ }
+
+ private FullTextFilterDef[] createFullTextFilterDefsForMapping() {
+ Set<Map<String, Object>> fullTextFilterDefs = mapping.getFullTextFilerDefs();
+ FullTextFilterDef[] filters = new FullTextFilterDef[fullTextFilterDefs.size()];
+ int index = 0;
+ for(Map<String,Object> filterDef : fullTextFilterDefs) {
+ filters[index] = createFullTextFilterDef(filterDef);
+ index++;
+ }
+ return filters;
+ }
+
+ private static FullTextFilterDef createFullTextFilterDef(Map<String,Object> filterDef) {
+ AnnotationDescriptor fullTextFilterDefAnnotation = new AnnotationDescriptor( FullTextFilterDef.class );
+ for (Entry<String, Object> entry : filterDef.entrySet()) {
+ fullTextFilterDefAnnotation.setValue(entry.getKey(), entry.getValue());
+ }
+
+ return AnnotationFactory.create( fullTextFilterDefAnnotation );
+ }
+
+ private static FullTextFilterDef[] createFullTextFilterDefArray(Set<Map<String, Object>> fullTextFilterDefs) {
+ FullTextFilterDef[] filters = new FullTextFilterDef[fullTextFilterDefs.size()];
+ int index = 0;
+ for(Map<String,Object> filterDef : fullTextFilterDefs) {
+ filters[index] = createFullTextFilterDef(filterDef);
+ index++;
+ }
+ return filters;
+ }
+
+ private AnalyzerDef createAnalyzerDef(Map<String, Object> analyzerDef) {
+ AnnotationDescriptor analyzerDefAnnotation = new AnnotationDescriptor( AnalyzerDef.class );
+ for ( Map.Entry<String, Object> entry : analyzerDef.entrySet() ) {
+ if ( entry.getKey().equals( "tokenizer" ) ) {
+ AnnotationDescriptor tokenizerAnnotation = new AnnotationDescriptor( TokenizerDef.class );
+ @SuppressWarnings( "unchecked" )
+ Map<String, Object> tokenizer = (Map<String, Object>) entry.getValue();
+ for( Map.Entry<String, Object> tokenizerEntry : tokenizer.entrySet() ) {
+ if ( tokenizerEntry.getKey().equals( "params" ) ) {
+ addParamsToAnnotation( tokenizerAnnotation, tokenizerEntry );
+ }
+ else {
+ tokenizerAnnotation.setValue( tokenizerEntry.getKey(), tokenizerEntry.getValue() );
+ }
+ }
+ analyzerDefAnnotation.setValue( "tokenizer", AnnotationFactory.create( tokenizerAnnotation ) );
+ }
+ else if ( entry.getKey().equals( "filters" ) ) {
+ @SuppressWarnings("unchecked") TokenFilterDef[] filtersArray = createFilters( (List<Map<String, Object>>) entry.getValue() );
+ analyzerDefAnnotation.setValue( "filters", filtersArray );
+ }
+ else {
+ analyzerDefAnnotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ }
+ return AnnotationFactory.create( analyzerDefAnnotation );
+ }
+
+ static private void addParamsToAnnotation(AnnotationDescriptor annotationDescriptor, Map.Entry<String, Object> entry) {
+ @SuppressWarnings("unchecked") Parameter[] paramsArray = createParams( ( List<Map<String, Object>> ) entry.getValue() );
+ annotationDescriptor.setValue( "params", paramsArray );
+ }
+
+ private TokenFilterDef[] createFilters(List<Map<String, Object>> filters) {
+ TokenFilterDef[] filtersArray = new TokenFilterDef[filters.size()];
+ int index = 0;
+ for (Map<String, Object> filter : filters) {
+ AnnotationDescriptor filterAnn = new AnnotationDescriptor( TokenFilterDef.class );
+ for ( Map.Entry<String, Object> filterEntry : filter.entrySet() ) {
+ if ( filterEntry.getKey().equals( "params" ) ) {
+ addParamsToAnnotation( filterAnn, filterEntry );
+ }
+ else {
+ filterAnn.setValue( filterEntry.getKey(), filterEntry.getValue() );
+ }
+ }
+ filtersArray[index] = AnnotationFactory.create( filterAnn );
+ index++;
+ }
+ return filtersArray;
+ }
+
+ private static Parameter[] createParams(List<Map<String, Object>> params) {
+ Parameter[] paramArray = new Parameter[ params.size() ];
+ int index = 0;
+ for ( Map<String, Object> entry : params) {
+ AnnotationDescriptor paramAnnotation = new AnnotationDescriptor( Parameter.class );
+ paramAnnotation.setValue( "name", entry.get("name") );
+ paramAnnotation.setValue( "value", entry.get("value") );
+ paramArray[index] = AnnotationFactory.create( paramAnnotation );
+ index++;
+ }
+ return paramArray;
+ }
+
+ private static class MappingModelAnnotationReader implements AnnotationReader {
+ private AnnotationReader delegate;
+ private SearchMapping mapping;
+ private transient Annotation[] annotationsArray;
+ private transient Map<Class<? extends Annotation>, Annotation> annotations;
+ private Class<?> entityType;
+ private ElementType elementType;
+ private String propertyName;
+
+ public MappingModelAnnotationReader(SearchMapping mapping, MetadataProvider delegate, AnnotatedElement el) {
+ this.delegate = delegate.getAnnotationReader( el );
+ this.mapping = mapping;
+ if ( el instanceof Class ) {
+ entityType = (Class<?>) el;
+ }
+ else if ( el instanceof Field ) {
+ Field field = (Field) el;
+ entityType = field.getDeclaringClass();
+ propertyName = field.getName();
+ elementType = ElementType.FIELD;
+ }
+ else if ( el instanceof Method ) {
+ Method method = (Method) el;
+ entityType = method.getDeclaringClass();
+ propertyName = method.getName();
+ if ( ReflectionUtil.isProperty(
+ method,
+ null, //this is yukky!! we'd rather get the TypeEnvironment()
+ FILTER
+ ) ) {
+ if ( propertyName.startsWith( "get" ) ) {
+ propertyName = Introspector.decapitalize( propertyName.substring( "get".length() ) );
+ }
+ else if ( propertyName.startsWith( "is" ) ) {
+ propertyName = Introspector.decapitalize( propertyName.substring( "is".length() ) );
+ }
+ else {
+ throw new RuntimeException( "Method " + propertyName + " is not a property getter" );
+ }
+ elementType = ElementType.METHOD;
+ }
+ else {
+ throw new SearchException( "Error in programmatic mapping. Method " + propertyName + " is not a property getter" );
+ }
+ }
+ else {
+ entityType = null;
+ propertyName = null;
+ }
+ }
+
+ /**
+ * Considers the class to be free of Hibernate Search annotations. Does not attempt to merge
+ * data.
+ * TODO merge data? or safeguard against errors
+ */
+ private void initAnnotations() {
+ if ( annotationsArray == null ) {
+ annotations = new HashMap<Class<? extends Annotation>, Annotation>();
+ delegatesAnnotationReading();
+ if (entityType != null) {
+ final EntityDescriptor entity = mapping.getEntityDescriptor( entityType );
+ if (entity != null) {
+ if (propertyName == null) {
+ //entityType overriding
+ createIndexed( entity );
+ }
+ else {
+ final PropertyDescriptor property = entity.getPropertyDescriptor( propertyName, elementType );
+ if (property != null) {
+ // property name overriding
+ createDocumentId( property );
+ createAnalyzerDiscriminator( property );
+ createFields( property );
+ createIndexEmbedded(property);
+ createContainedIn(property);
+
+ }
+ }
+ }
+ }
+ else {
+ delegatesAnnotationReading();
+ }
+
+ populateAnnotationArray();
+ }
+ }
+
+
+ private void createDateBridge(PropertyDescriptor property) {
+ Map<String, Object> map = property.getDateBridge();
+ for(Map.Entry<String, Object> entry: map.entrySet()) {
+ AnnotationDescriptor dateBridgeAnnotation = new AnnotationDescriptor( DateBridge.class );
+ dateBridgeAnnotation.setValue(entry.getKey(), entry.getValue());
+ annotations.put( DateBridge.class, AnnotationFactory.create( dateBridgeAnnotation ) );
+ }
+ }
+
+ private void createCalendarBridge(PropertyDescriptor property) {
+ Map<String, Object> map = property.getCalendarBridge();
+ for(Map.Entry<String, Object> entry: map.entrySet()) {
+ AnnotationDescriptor calendarBridgeAnnotation = new AnnotationDescriptor( CalendarBridge.class );
+ calendarBridgeAnnotation.setValue(entry.getKey(), entry.getValue());
+ annotations.put( CalendarBridge.class, AnnotationFactory.create( calendarBridgeAnnotation ) );
+ }
+ }
+
+ private void createDocumentId(PropertyDescriptor property) {
+ Map<String, Object> documentId = property.getDocumentId();
+ if (documentId != null) {
+ AnnotationDescriptor documentIdAnnotation = new AnnotationDescriptor( DocumentId.class );
+ for ( Map.Entry<String, Object> entry : documentId.entrySet() ) {
+ documentIdAnnotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( DocumentId.class, AnnotationFactory.create( documentIdAnnotation ) );
+ }
+ }
+
+ private void createAnalyzerDiscriminator(PropertyDescriptor property) {
+ Map<String, Object> analyzerDiscriminator = property.getAnalyzerDiscriminator();
+ if (analyzerDiscriminator != null) {
+ AnnotationDescriptor analyzerDiscriminatorAnn = new AnnotationDescriptor( AnalyzerDiscriminator.class );
+ for ( Map.Entry<String, Object> entry : analyzerDiscriminator.entrySet() ) {
+ analyzerDiscriminatorAnn.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( AnalyzerDiscriminator.class, AnnotationFactory.create( analyzerDiscriminatorAnn ) );
+ }
+ }
+
+
+ private void createFields(PropertyDescriptor property) {
+ final Collection<Map<String,Object>> fields = property.getFields();
+ List<org.hibernate.search.annotations.Field> fieldAnnotations =
+ new ArrayList<org.hibernate.search.annotations.Field>( fields.size() );
+ for(Map<String, Object> field : fields) {
+ AnnotationDescriptor fieldAnnotation = new AnnotationDescriptor( org.hibernate.search.annotations.Field.class );
+ for ( Map.Entry<String, Object> entry : field.entrySet() ) {
+ if ( entry.getKey().equals( "analyzer" ) ) {
+ AnnotationDescriptor analyzerAnnotation = new AnnotationDescriptor( Analyzer.class );
+ @SuppressWarnings( "unchecked" )
+ Map<String, Object> analyzer = (Map<String, Object>) entry.getValue();
+ for( Map.Entry<String, Object> analyzerEntry : analyzer.entrySet() ) {
+ analyzerAnnotation.setValue( analyzerEntry.getKey(), analyzerEntry.getValue() );
+ }
+ fieldAnnotation.setValue( "analyzer", AnnotationFactory.create( analyzerAnnotation ) );
+ }
+ else if ( entry.getKey().equals( "boost" ) ) {
+ AnnotationDescriptor boostAnnotation = new AnnotationDescriptor( Boost.class );
+ @SuppressWarnings( "unchecked" )
+ Map<String, Object> boost = (Map<String, Object>) entry.getValue();
+ for( Map.Entry<String, Object> boostEntry : boost.entrySet() ) {
+ boostAnnotation.setValue( boostEntry.getKey(), boostEntry.getValue() );
+ }
+ fieldAnnotation.setValue( "boost", AnnotationFactory.create( boostAnnotation ) );
+ }
+ else if ( entry.getKey().equals( "bridge" ) ) {
+ AnnotationDescriptor bridgeAnnotation = new AnnotationDescriptor( FieldBridge.class );
+ @SuppressWarnings( "unchecked" )
+ Map<String, Object> bridge = (Map<String, Object>) entry.getValue();
+ for( Map.Entry<String, Object> bridgeEntry : bridge.entrySet() ) {
+ if ( bridgeEntry.getKey().equals( "params" ) ) {
+ addParamsToAnnotation( bridgeAnnotation, bridgeEntry );
+ }
+ else {
+ bridgeAnnotation.setValue( bridgeEntry.getKey(), bridgeEntry.getValue() );
+ }
+ }
+ fieldAnnotation.setValue( "bridge", AnnotationFactory.create( bridgeAnnotation ) );
+ }
+ else {
+ fieldAnnotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ }
+ fieldAnnotations.add( (org.hibernate.search.annotations.Field) AnnotationFactory.create( fieldAnnotation ) );
+ }
+ AnnotationDescriptor fieldsAnnotation = new AnnotationDescriptor( Fields.class );
+
+ final org.hibernate.search.annotations.Field[] fieldArray =
+ new org.hibernate.search.annotations.Field[fieldAnnotations.size()];
+ final org.hibernate.search.annotations.Field[] fieldAsArray = fieldAnnotations.toArray( fieldArray );
+
+ fieldsAnnotation.setValue( "value", fieldAsArray );
+ annotations.put( Fields.class, AnnotationFactory.create( fieldsAnnotation ) );
+ createDateBridge(property);
+ createCalendarBridge(property);
+ createDynamicBoost(property);
+
+ }
+
+ private void createDynamicBoost(PropertyDescriptor property) {
+ if (property.getDynamicBoost() != null) {
+ AnnotationDescriptor dynamicBoostAnn = new AnnotationDescriptor( DynamicBoost.class );
+ Set<Entry<String,Object>> entrySet = property.getDynamicBoost().entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ dynamicBoostAnn.setValue(entry.getKey(), entry.getValue());
+ }
+ annotations.put(DynamicBoost.class, AnnotationFactory.create( dynamicBoostAnn ));
+ }
+ }
+ private void createContainedIn(PropertyDescriptor property) {
+ if (property.getContainedIn() != null) {
+ Map<String, Object> containedIn = property.getContainedIn();
+ AnnotationDescriptor containedInAnn = new AnnotationDescriptor( ContainedIn.class );
+ Set<Entry<String,Object>> entrySet = containedIn.entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ containedInAnn.setValue(entry.getKey(), entry.getValue());
+ }
+ annotations.put(ContainedIn.class,AnnotationFactory.create(containedInAnn));
+ }
+ }
+
+ private void createIndexEmbedded(PropertyDescriptor property) {
+ Map<String, Object> indexEmbedded = property.getIndexEmbedded();
+ if (indexEmbedded != null) {
+ AnnotationDescriptor indexEmbeddedAnn = new AnnotationDescriptor(IndexedEmbedded.class);
+ Set<Entry<String,Object>> entrySet = indexEmbedded.entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ indexEmbeddedAnn.setValue(entry.getKey(), entry.getValue());
+ }
+ annotations.put(IndexedEmbedded.class, AnnotationFactory.create(indexEmbeddedAnn));
+ }
+ }
+
+ private void createIndexed(EntityDescriptor entity) {
+ Class<? extends Annotation> annotationType = Indexed.class;
+ AnnotationDescriptor annotation = new AnnotationDescriptor( annotationType );
+ if (entity.getIndexed() != null) {
+ for ( Map.Entry<String, Object> entry : entity.getIndexed().entrySet() ) {
+ annotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( annotationType, AnnotationFactory.create( annotation ) );
+ }
+
+ if ( entity.getSimilarity() != null ) {
+ annotation = new AnnotationDescriptor( Similarity.class );
+ for ( Map.Entry<String, Object> entry : entity.getSimilarity().entrySet() ) {
+ annotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( Similarity.class, AnnotationFactory.create( annotation ) );
+ }
+
+ if ( entity.getBoost() != null ) {
+ annotation = new AnnotationDescriptor( Boost.class );
+ for ( Map.Entry<String, Object> entry : entity.getBoost().entrySet() ) {
+ annotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( Boost.class, AnnotationFactory.create( annotation ) );
+ }
+
+ if ( entity.getAnalyzerDiscriminator() != null ) {
+ annotation = new AnnotationDescriptor( AnalyzerDiscriminator.class );
+ for ( Map.Entry<String, Object> entry : entity.getAnalyzerDiscriminator().entrySet() ) {
+ annotation.setValue( entry.getKey(), entry.getValue() );
+ }
+ annotations.put( AnalyzerDiscriminator.class, AnnotationFactory.create( annotation ) );
+ }
+ if (entity.getFullTextFilterDefs().size() > 0) {
+ AnnotationDescriptor fullTextFilterDefsAnnotation = new AnnotationDescriptor( FullTextFilterDefs.class );
+ FullTextFilterDef[] fullTextFilterDefArray = createFullTextFilterDefArray(entity.getFullTextFilterDefs());
+ fullTextFilterDefsAnnotation.setValue("value", fullTextFilterDefArray);
+ annotations.put( FullTextFilterDefs.class, AnnotationFactory.create( fullTextFilterDefsAnnotation ) );
+ }
+ if (entity.getProvidedId() != null) {
+ createProvidedId(entity);
+ }
+
+ if (entity.getClassBridgeDefs().size() > 0) {
+ AnnotationDescriptor classBridgesAnn = new AnnotationDescriptor( ClassBridges.class );
+ ClassBridge[] classBridesDefArray = createClassBridgesDefArray(entity.getClassBridgeDefs());
+ classBridgesAnn.setValue("value", classBridesDefArray);
+ annotations.put(ClassBridges.class, AnnotationFactory.create( classBridgesAnn ));
+ }
+
+ if (entity.getDynamicBoost() != null) {
+ AnnotationDescriptor dynamicBoostAnn = new AnnotationDescriptor( DynamicBoost.class );
+ Set<Entry<String,Object>> entrySet = entity.getDynamicBoost().entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ dynamicBoostAnn.setValue(entry.getKey(), entry.getValue());
+ }
+ annotations.put(DynamicBoost.class, AnnotationFactory.create( dynamicBoostAnn ));
+ }
+
+ }
+
+ private ClassBridge[] createClassBridgesDefArray(Set<Map<String, Object>> classBridgeDefs) {
+ ClassBridge[] classBridgeDefArray = new ClassBridge[classBridgeDefs.size()];
+ int index = 0;
+ for(Map<String,Object> classBridgeDef : classBridgeDefs) {
+ classBridgeDefArray[index] = createClassBridge(classBridgeDef);
+ index++;
+ }
+
+ return classBridgeDefArray;
+ }
+
+
+ private ClassBridge createClassBridge(Map<String, Object> classBridgeDef) {
+ AnnotationDescriptor annotation = new AnnotationDescriptor( ClassBridge.class );
+ Set<Entry<String,Object>> entrySet = classBridgeDef.entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ if (entry.getKey().equals("params")) {
+ addParamsToAnnotation(annotation, entry);
+ } else {
+ annotation.setValue(entry.getKey(), entry.getValue());
+ }
+ }
+ return AnnotationFactory.create( annotation );
+ }
+
+ private void createProvidedId(EntityDescriptor entity) {
+ AnnotationDescriptor annotation = new AnnotationDescriptor( ProvidedId.class );
+ Set<Entry<String,Object>> entrySet = entity.getProvidedId().entrySet();
+ for (Entry<String, Object> entry : entrySet) {
+ if (entry.getKey().equals("bridge")) {
+ AnnotationDescriptor bridgeAnnotation = new AnnotationDescriptor( FieldBridge.class );
+ @SuppressWarnings("unchecked")
+ Map<String, Object> bridge = (Map<String, Object>) entry.getValue();
+ for( Map.Entry<String, Object> bridgeEntry : bridge.entrySet() ) {
+ if ( bridgeEntry.getKey().equals( "params" ) ) {
+ addParamsToAnnotation( bridgeAnnotation, bridgeEntry );
+ }
+ else {
+ bridgeAnnotation.setValue( bridgeEntry.getKey(), bridgeEntry.getValue() );
+ }
+ }
+ annotation.setValue( "bridge", AnnotationFactory.create( bridgeAnnotation ) );
+ } else {
+ annotation.setValue(entry.getKey(), entry.getValue());
+ }
+ }
+ annotations.put( ProvidedId.class, AnnotationFactory.create( annotation ) );
+ }
+
+ private void populateAnnotationArray() {
+ annotationsArray = new Annotation[ annotations.size() ];
+ int index = 0;
+ for( Annotation ann: annotations.values() ) {
+ annotationsArray[index] = ann;
+ index++;
+ }
+ }
+
+ private void delegatesAnnotationReading() {
+ for ( Annotation a : delegate.getAnnotations() ) {
+ annotations.put( a.annotationType(), a );
+ }
+ }
+
+ @SuppressWarnings( "unchecked" )
+ public <T extends Annotation> T getAnnotation(Class<T> annotationType) {
+ initAnnotations();
+ return (T) annotations.get( annotationType );
+ }
+
+ @SuppressWarnings( "unchecked" )
+ public <T extends Annotation> boolean isAnnotationPresent(Class<T> annotationType) {
+ initAnnotations();
+ return (T) annotations.get( annotationType ) != null;
+ }
+
+ public Annotation[] getAnnotations() {
+ initAnnotations();
+ return new Annotation[0]; //NOTE: returns an empty array; merged annotations are only exposed via getAnnotation(Class)
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MappingModelMetadataProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
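
For illustration only (not part of the commit): MappingModelMetadataProvider consumes a programmatic SearchMapping and exposes it as the equivalent annotations. A minimal mapping sketch; the fluent builder methods are taken from the 3.2 programmatic mapping API and should be verified against SearchMapping, the property key is assumed to be Environment.MODEL_MAPPING ("hibernate.search.model_mapping"), and Book is a hypothetical entity.

    SearchMapping mapping = new SearchMapping();
    mapping.entity( Book.class ).indexed()
            .property( "id", ElementType.FIELD ).documentId()
            .property( "title", ElementType.FIELD ).field();

    org.hibernate.cfg.AnnotationConfiguration cfg = new org.hibernate.cfg.AnnotationConfiguration();
    // the mapping object itself is passed through the configuration properties
    cfg.getProperties().put( "hibernate.search.model_mapping", mapping );
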
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MassIndexerImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MassIndexerImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MassIndexerImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,201 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+
+import org.hibernate.CacheMode;
+import org.hibernate.SessionFactory;
+import org.hibernate.search.MassIndexer;
+import org.hibernate.search.batchindexing.BatchCoordinator;
+import org.hibernate.search.batchindexing.Executors;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * Prepares and configures a BatchIndexingWorkspace to start rebuilding
+ * the indexes for all entity instances in the database.
+ * The target entities are either all indexed entities or a chosen
+ * subset of them, always including all their indexed subtypes.
+ *
+ * @author Sanne Grinovero
+ */
+public class MassIndexerImpl implements MassIndexer {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final SearchFactoryImplementor searchFactoryImplementor;
+ private final SessionFactory sessionFactory;
+
+ protected Set<Class<?>> rootEntities = new HashSet<Class<?>>();
+
+ // default settings defined here:
+ private int objectLoadingThreads = 2; //loading the main entity
+ private int collectionLoadingThreads = 4; //also responsible for loading of lazy @IndexedEmbedded collections
+// private int writerThreads = 1; //also running the Analyzers
+ private int objectLoadingBatchSize = 10;
+ private int objectsLimit = 0; //means no limit at all
+ private CacheMode cacheMode = CacheMode.IGNORE;
+ private boolean optimizeAtEnd = true;
+ private boolean purgeAtStart = true;
+ private boolean optimizeAfterPurge = true;
+ private MassIndexerProgressMonitor monitor = new SimpleIndexingProgressMonitor();
+
+ protected MassIndexerImpl(SearchFactoryImplementor searchFactory, SessionFactory sessionFactory, Class<?>...entities) {
+ this.searchFactoryImplementor = searchFactory;
+ this.sessionFactory = sessionFactory;
+ rootEntities = toRootEntities( searchFactoryImplementor, entities );
+ }
+
+ /**
+ * From the given set of classes a new set is built containing all indexed
+ * subclasses, from which all subtypes of other indexed entities are then removed.
+ * @param selection the originally requested entity types
+ * @return a new set containing only the root indexed entity types
+ */
+ private static Set<Class<?>> toRootEntities(SearchFactoryImplementor searchFactoryImplementor, Class<?>... selection) {
+ Set<Class<?>> entities = new HashSet<Class<?>>();
+ //first build the "entities" set containing all indexed subtypes of "selection".
+ for (Class<?> entityType : selection) {
+ Set<Class<?>> targetedClasses = searchFactoryImplementor.getIndexedTypesPolymorphic( new Class[] {entityType} );
+ if ( targetedClasses.isEmpty() ) {
+ String msg = entityType.getName() + " is not an indexed entity or a subclass of an indexed entity";
+ throw new IllegalArgumentException( msg );
+ }
+ entities.addAll( targetedClasses );
+ }
+ Set<Class<?>> cleaned = new HashSet<Class<?>>();
+ Set<Class<?>> toRemove = new HashSet<Class<?>>();
+ //now remove all repeated types to avoid duplicate loading by polymorphic query loading
+ for (Class<?> type : entities) {
+ boolean typeIsOk = true;
+ for (Class<?> existing : cleaned) {
+ if ( existing.isAssignableFrom( type ) ) {
+ typeIsOk = false;
+ break;
+ }
+ if ( type.isAssignableFrom( existing ) ) {
+ toRemove.add( existing );
+ }
+ }
+ if ( typeIsOk ) {
+ cleaned.add( type );
+ }
+ }
+ cleaned.removeAll( toRemove );
+ log.debug( "Targets for indexing job: {}", cleaned );
+ return cleaned;
+ }
+
+ public MassIndexer cacheMode(CacheMode cacheMode) {
+ if ( cacheMode == null )
+ throw new IllegalArgumentException( "cacheMode must not be null" );
+ this.cacheMode = cacheMode;
+ return this;
+ }
+
+ public MassIndexer threadsToLoadObjects(int numberOfThreads) {
+ if ( numberOfThreads < 1 )
+ throw new IllegalArgumentException( "numberOfThreads must be at least 1" );
+ this.objectLoadingThreads = numberOfThreads;
+ return this;
+ }
+
+ public MassIndexer batchSizeToLoadObjects(int batchSize) {
+ if ( batchSize < 1 )
+ throw new IllegalArgumentException( "batchSize must be at least 1" );
+ this.objectLoadingBatchSize = batchSize;
+ return this;
+ }
+
+ public MassIndexer threadsForSubsequentFetching(int numberOfThreads) {
+ if ( numberOfThreads < 1 )
+ throw new IllegalArgumentException( "numberOfThreads must be at least 1" );
+ this.collectionLoadingThreads = numberOfThreads;
+ return this;
+ }
+
+ //TODO see MassIndexer interface
+// public MassIndexer threadsForIndexWriter(int numberOfThreads) {
+// if ( numberOfThreads < 1 )
+// throw new IllegalArgumentException( "numberOfThreads must be at least 1" );
+// this.writerThreads = numberOfThreads;
+// return this;
+// }
+
+ public MassIndexer optimizeOnFinish(boolean optimize) {
+ this.optimizeAtEnd = optimize;
+ return this;
+ }
+
+ public MassIndexer optimizeAfterPurge(boolean optimize) {
+ this.optimizeAfterPurge = optimize;
+ return this;
+ }
+
+ public MassIndexer purgeAllOnStart(boolean purgeAll) {
+ this.purgeAtStart = purgeAll;
+ return this;
+ }
+
+ public Future<?> start() {
+ BatchCoordinator coordinator = createCoordinator();
+ ExecutorService executor = Executors.newFixedThreadPool( 1, "batch coordinator" );
+ try {
+ Future<?> submit = executor.submit( coordinator );
+ return submit;
+ }
+ finally {
+ executor.shutdown();
+ }
+ }
+
+ public void startAndWait() throws InterruptedException {
+ BatchCoordinator coordinator = createCoordinator();
+ coordinator.run();
+ if ( Thread.currentThread().isInterrupted() ) {
+ throw new InterruptedException();
+ }
+ }
+
+ protected BatchCoordinator createCoordinator() {
+ return new BatchCoordinator( rootEntities, searchFactoryImplementor, sessionFactory,
+ objectLoadingThreads, collectionLoadingThreads,
+ cacheMode, objectLoadingBatchSize, objectsLimit,
+ optimizeAtEnd, purgeAtStart, optimizeAfterPurge,
+ monitor );
+ }
+
+ public MassIndexer limitIndexedObjectsTo(int maximum) {
+ this.objectsLimit = maximum;
+ return this;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/MassIndexerImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
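
Illustrative usage of the fluent API above (not part of the patch): a minimal sketch assuming an open Hibernate Session and assuming that FullTextSession exposes the same createIndexer(...) entry point declared on the JPA interface later in this diff.

import org.hibernate.CacheMode;
import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class MassIndexingExample {

    // Rebuilds the index for one entity type and blocks until the work is done.
    public static void rebuildIndex(Session session, Class<?> entityType) throws InterruptedException {
        FullTextSession fullTextSession = Search.getFullTextSession( session );
        fullTextSession.createIndexer( entityType )
                .purgeAllOnStart( true )             // wipe the existing documents first
                .threadsToLoadObjects( 4 )           // threads loading the root entities
                .threadsForSubsequentFetching( 8 )   // threads loading lazy/@IndexedEmbedded collections
                .batchSizeToLoadObjects( 25 )
                .cacheMode( CacheMode.IGNORE )
                .optimizeOnFinish( true )
                .startAndWait();                     // start() would instead return a Future
    }
}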
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchFactoryImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchFactoryImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchFactoryImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,686 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.beans.Introspector;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.search.Similarity;
+import org.hibernate.annotations.common.reflection.MetadataProvider;
+import org.hibernate.annotations.common.reflection.MetadataProviderInjector;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.Version;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.FullTextFilterDefs;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Key;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.LuceneIndexingParameters;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.OptimizeLuceneWork;
+import org.hibernate.search.backend.Worker;
+import org.hibernate.search.backend.WorkerFactory;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.search.backend.configuration.MaskedProperty;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.backend.impl.batchlucene.LuceneBatchBackend;
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.cfg.SearchMapping;
+import org.hibernate.search.engine.DocumentBuilderContainedEntity;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.EntityState;
+import org.hibernate.search.engine.FilterDef;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.filter.CachingWrapperFilter;
+import org.hibernate.search.filter.FilterCachingStrategy;
+import org.hibernate.search.filter.MRUFilterCachingStrategy;
+import org.hibernate.search.filter.ShardSensitiveOnlyFilter;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.reader.ReaderProviderFactory;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.DirectoryProviderFactory;
+import org.hibernate.search.store.optimization.OptimizerStrategy;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.PluginLoader;
+import org.hibernate.util.StringHelper;
+import org.slf4j.Logger;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SearchFactoryImpl implements SearchFactoryImplementor {
+
+ static {
+ Version.touch();
+ }
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final Map<Class<?>, DocumentBuilderIndexedEntity<?>> documentBuildersIndexedEntities = new HashMap<Class<?>, DocumentBuilderIndexedEntity<?>>();
+ private final Map<Class<?>, DocumentBuilderContainedEntity<?>> documentBuildersContainedEntities = new HashMap<Class<?>, DocumentBuilderContainedEntity<?>>();
+ //keep track of the index modifiers per DirectoryProvider since multiple entity can use the same directory provider
+ private final Map<DirectoryProvider<?>, DirectoryProviderData> dirProviderData = new HashMap<DirectoryProvider<?>, DirectoryProviderData>();
+ private final Worker worker;
+ private final ReaderProvider readerProvider;
+ private BackendQueueProcessorFactory backendQueueProcessorFactory;
+ private final Map<String, FilterDef> filterDefinitions = new HashMap<String, FilterDef>();
+ private final FilterCachingStrategy filterCachingStrategy;
+ private Map<String, Analyzer> analyzers;
+ private final AtomicBoolean stopped = new AtomicBoolean( false );
+ private final int cacheBitResultsSize;
+ private final Properties configurationProperties;
+
+ private final PolymorphicIndexHierarchy indexHierarchy = new PolymorphicIndexHierarchy();
+
+ /*
+	 * Used as a barrier (piggyback usage) between initialization and subsequent usage of the searchFactory in different threads;
+	 * this is due to our use of the initialize pattern in a few areas.
+	 * Subsequent reads on volatiles should be very cheap on most platforms, especially since we don't write after init.
+	 *
+	 * This volatile is meant to be written after initialization
+	 * and read by all subsequent methods accessing the SearchFactory state;
+	 * the read is done as barrier != 0. If barrier == 0 we have a race condition, but it is not likely to happen.
+ */
+ private volatile short barrier;
+
+ /**
+ * Each directory provider (index) can have its own performance settings.
+ */
+ private Map<DirectoryProvider, LuceneIndexingParameters> dirProviderIndexingParams =
+ new HashMap<DirectoryProvider, LuceneIndexingParameters>();
+ private final String indexingStrategy;
+
+
+ public BackendQueueProcessorFactory getBackendQueueProcessorFactory() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return backendQueueProcessorFactory;
+ }
+
+ public void setBackendQueueProcessorFactory(BackendQueueProcessorFactory backendQueueProcessorFactory) {
+ //no need to set a barrier, we init in the same thread as the init one
+ this.backendQueueProcessorFactory = backendQueueProcessorFactory;
+ }
+
+ public SearchFactoryImpl(SearchConfiguration cfg) {
+ ReflectionManager reflectionManager = getReflectionManager(cfg);
+ final SearchMapping mapping = SearchMappingBuilder.getSearchMapping(cfg);
+ if ( mapping != null) {
+ if ( ! ( reflectionManager instanceof MetadataProviderInjector)) {
+ throw new SearchException("Programmatic mapping model used but ReflectionManager does not implement "
+ + MetadataProviderInjector.class.getName() );
+ }
+ MetadataProviderInjector injector = (MetadataProviderInjector) reflectionManager;
+ MetadataProvider original = injector.getMetadataProvider();
+ injector.setMetadataProvider( new MappingModelMetadataProvider( original, mapping ) );
+
+ }
+
+ this.indexingStrategy = defineIndexingStrategy( cfg ); //need to be done before the document builds
+ initDocumentBuilders( cfg, reflectionManager );
+
+ Set<Class<?>> indexedClasses = documentBuildersIndexedEntities.keySet();
+ for ( DocumentBuilderIndexedEntity builder : documentBuildersIndexedEntities.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ //not really necessary today
+ for ( DocumentBuilderContainedEntity builder : documentBuildersContainedEntities.values() ) {
+ builder.postInitialize( indexedClasses );
+ }
+ fillSimilarityMapping();
+ this.worker = WorkerFactory.createWorker( cfg, this );
+ this.readerProvider = ReaderProviderFactory.createReaderProvider( cfg, this );
+ this.filterCachingStrategy = buildFilterCachingStrategy( cfg.getProperties() );
+ this.cacheBitResultsSize = ConfigurationParseHelper.getIntValue(
+ cfg.getProperties(), Environment.CACHE_DOCIDRESULTS_SIZE, CachingWrapperFilter.DEFAULT_SIZE
+ );
+ this.configurationProperties = cfg.getProperties();
+ this.barrier = 1; //write barrier
+ }
+
+
+
+ private void fillSimilarityMapping() {
+ for ( DirectoryProviderData directoryConfiguration : dirProviderData.values() ) {
+ for (Class<?> indexedType : directoryConfiguration.classes) {
+ DocumentBuilderIndexedEntity<?> documentBuilder = documentBuildersIndexedEntities.get( indexedType );
+ Similarity similarity = documentBuilder.getSimilarity();
+ Similarity prevSimilarity = directoryConfiguration.similarity;
+ if ( prevSimilarity != null && ! prevSimilarity.getClass().equals( similarity.getClass() ) ) {
+ throw new SearchException( "Multiple entities are sharing the same index but are declaring an " +
+						"inconsistent Similarity. When overriding the default Similarity make sure that all types sharing the same index " +
+ "declare the same Similarity implementation." );
+ }
+ else {
+ directoryConfiguration.similarity = similarity;
+ }
+ }
+ }
+ }
+
+ private ReflectionManager getReflectionManager(SearchConfiguration cfg) {
+ ReflectionManager reflectionManager = cfg.getReflectionManager();
+ if ( reflectionManager == null ) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ return reflectionManager;
+ }
+
+ private static String defineIndexingStrategy(SearchConfiguration cfg) {
+ String indexingStrategy = cfg.getProperties().getProperty( Environment.INDEXING_STRATEGY, "event" );
+ if ( !( "event".equals( indexingStrategy ) || "manual".equals( indexingStrategy ) ) ) {
+ throw new SearchException( Environment.INDEXING_STRATEGY + " unknown: " + indexingStrategy );
+ }
+ return indexingStrategy;
+ }
+
+ public String getIndexingStrategy() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return indexingStrategy;
+ }
+
+ public void close() {
+ if ( barrier != 0 ) {
+ } //read barrier
+		if ( stopped.compareAndSet( false, true ) ) { //make sure we only stop once
+ try {
+ worker.close();
+ }
+ catch ( Exception e ) {
+ log.error( "Worker raises an exception on close()", e );
+ }
+
+ try {
+ readerProvider.destroy();
+ }
+ catch ( Exception e ) {
+ log.error( "ReaderProvider raises an exception on destroy()", e );
+ }
+
+ //TODO move directory provider cleaning to DirectoryProviderFactory
+ for ( DirectoryProvider dp : getDirectoryProviders() ) {
+ try {
+ dp.stop();
+ }
+ catch ( Exception e ) {
+ log.error( "DirectoryProvider raises an exception on stop() ", e );
+ }
+ }
+ }
+ }
+
+ public void addClassToDirectoryProvider(Class<?> clazz, DirectoryProvider<?> directoryProvider, boolean exclusiveIndexUsage) {
+ //no need to set a read barrier, we only use this class in the init thread
+ DirectoryProviderData data = dirProviderData.get( directoryProvider );
+ if ( data == null ) {
+ data = new DirectoryProviderData();
+ dirProviderData.put( directoryProvider, data );
+ }
+ data.classes.add( clazz );
+ data.exclusiveIndexUsage = exclusiveIndexUsage;
+ }
+
+ public Set<Class<?>> getClassesInDirectoryProvider(DirectoryProvider<?> directoryProvider) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return Collections.unmodifiableSet( dirProviderData.get( directoryProvider ).classes );
+ }
+
+ private void bindFilterDefs(XClass mappedXClass) {
+ FullTextFilterDef defAnn = mappedXClass.getAnnotation( FullTextFilterDef.class );
+ if ( defAnn != null ) {
+ bindFilterDef( defAnn, mappedXClass );
+ }
+ FullTextFilterDefs defsAnn = mappedXClass.getAnnotation( FullTextFilterDefs.class );
+ if ( defsAnn != null ) {
+ for ( FullTextFilterDef def : defsAnn.value() ) {
+ bindFilterDef( def, mappedXClass );
+ }
+ }
+ }
+
+
+ private void initProgrammaticallyDefinedFilterDef(ReflectionManager reflectionManager) {
+ @SuppressWarnings("unchecked") Map defaults = reflectionManager.getDefaults();
+ FullTextFilterDef[] filterDefs = (FullTextFilterDef[]) defaults.get(FullTextFilterDefs.class);
+ if (filterDefs != null && filterDefs.length != 0) {
+ for (FullTextFilterDef defAnn : filterDefs) {
+ if ( filterDefinitions.containsKey( defAnn.name() ) ) {
+ throw new SearchException("Multiple definition of @FullTextFilterDef.name=" + defAnn.name());
+ }
+ bindFullTextFilterDef(defAnn);
+ }
+ }
+ }
+
+ private void bindFilterDef(FullTextFilterDef defAnn, XClass mappedXClass) {
+ if ( filterDefinitions.containsKey( defAnn.name() ) ) {
+ throw new SearchException(
+ "Multiple definition of @FullTextFilterDef.name=" + defAnn.name() + ": "
+ + mappedXClass.getName()
+ );
+ }
+
+ bindFullTextFilterDef(defAnn);
+ }
+
+ private void bindFullTextFilterDef(FullTextFilterDef defAnn) {
+ FilterDef filterDef = new FilterDef( defAnn );
+ if ( filterDef.getImpl().equals( ShardSensitiveOnlyFilter.class ) ) {
+ //this is a placeholder don't process regularly
+ filterDefinitions.put( defAnn.name(), filterDef );
+ return;
+ }
+ try {
+ filterDef.getImpl().newInstance();
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException( "Unable to create Filter class: " + filterDef.getImpl().getName(), e );
+ }
+ catch ( InstantiationException e ) {
+ throw new SearchException( "Unable to create Filter class: " + filterDef.getImpl().getName(), e );
+ }
+ for ( Method method : filterDef.getImpl().getMethods() ) {
+ if ( method.isAnnotationPresent( Factory.class ) ) {
+ if ( filterDef.getFactoryMethod() != null ) {
+ throw new SearchException(
+ "Multiple @Factory methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName()
+ );
+ }
+ if ( !method.isAccessible() ) {
+ method.setAccessible( true );
+ }
+ filterDef.setFactoryMethod( method );
+ }
+ if ( method.isAnnotationPresent( Key.class ) ) {
+ if ( filterDef.getKeyMethod() != null ) {
+ throw new SearchException(
+ "Multiple @Key methods found" + defAnn.name() + ": "
+ + filterDef.getImpl().getName() + "." + method.getName()
+ );
+ }
+ if ( !method.isAccessible() ) {
+ method.setAccessible( true );
+ }
+ filterDef.setKeyMethod( method );
+ }
+
+ String name = method.getName();
+ if ( name.startsWith( "set" ) && method.getParameterTypes().length == 1 ) {
+ filterDef.addSetter( Introspector.decapitalize( name.substring( 3 ) ), method );
+ }
+ }
+ filterDefinitions.put( defAnn.name(), filterDef );
+ }
+
+
+ public Map<Class<?>, DocumentBuilderIndexedEntity<?>> getDocumentBuildersIndexedEntities() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return documentBuildersIndexedEntities;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> DocumentBuilderIndexedEntity<T> getDocumentBuilderIndexedEntity(Class<T> entityType) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return ( DocumentBuilderIndexedEntity<T> ) documentBuildersIndexedEntities.get( entityType );
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> DocumentBuilderContainedEntity<T> getDocumentBuilderContainedEntity(Class<T> entityType) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return ( DocumentBuilderContainedEntity<T> ) documentBuildersContainedEntities.get( entityType );
+ }
+
+ public Set<DirectoryProvider<?>> getDirectoryProviders() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return this.dirProviderData.keySet();
+ }
+
+ public Worker getWorker() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return worker;
+ }
+
+ public void addOptimizerStrategy(DirectoryProvider<?> provider, OptimizerStrategy optimizerStrategy) {
+ //no need to set a read barrier, we run this method on the init thread
+ DirectoryProviderData data = dirProviderData.get( provider );
+ if ( data == null ) {
+ data = new DirectoryProviderData();
+ dirProviderData.put( provider, data );
+ }
+ data.optimizerStrategy = optimizerStrategy;
+ }
+
+ public void addIndexingParameters(DirectoryProvider<?> provider, LuceneIndexingParameters indexingParams) {
+ //no need to set a read barrier, we run this method on the init thread
+ dirProviderIndexingParams.put( provider, indexingParams );
+ }
+
+ public OptimizerStrategy getOptimizerStrategy(DirectoryProvider<?> provider) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return dirProviderData.get( provider ).optimizerStrategy;
+ }
+
+ public LuceneIndexingParameters getIndexingParameters(DirectoryProvider<?> provider) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return dirProviderIndexingParams.get( provider );
+ }
+
+ public ReaderProvider getReaderProvider() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return readerProvider;
+ }
+
+ public DirectoryProvider[] getDirectoryProviders(Class<?> entity) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ DocumentBuilderIndexedEntity<?> documentBuilder = getDocumentBuilderIndexedEntity( entity );
+ return documentBuilder == null ? null : documentBuilder.getDirectoryProviders();
+ }
+
+ public void optimize() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ Set<Class<?>> clazzs = getDocumentBuildersIndexedEntities().keySet();
+ for ( Class clazz : clazzs ) {
+ optimize( clazz );
+ }
+ }
+
+ public void optimize(Class entityType) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ if ( !getDocumentBuildersIndexedEntities().containsKey( entityType ) ) {
+ throw new SearchException( "Entity not indexed: " + entityType );
+ }
+ List<LuceneWork> queue = new ArrayList<LuceneWork>( 1 );
+ queue.add( new OptimizeLuceneWork( entityType ) );
+ getBackendQueueProcessorFactory().getProcessor( queue ).run();
+ }
+
+ public Analyzer getAnalyzer(String name) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ final Analyzer analyzer = analyzers.get( name );
+ if ( analyzer == null ) {
+ throw new SearchException( "Unknown Analyzer definition: " + name );
+ }
+ return analyzer;
+ }
+
+ public Analyzer getAnalyzer(Class<?> clazz) {
+ if ( clazz == null ) {
+ throw new IllegalArgumentException( "A class has to be specified for retrieving a scoped analyzer" );
+ }
+
+ DocumentBuilderIndexedEntity<?> builder = documentBuildersIndexedEntities.get( clazz );
+ if ( builder == null ) {
+ throw new IllegalArgumentException(
+ "Entity for which to retrieve the scoped analyzer is not an @Indexed entity: " + clazz.getName()
+ );
+ }
+
+ return builder.getAnalyzer();
+ }
+
+ private void initDocumentBuilders(SearchConfiguration cfg, ReflectionManager reflectionManager) {
+ InitContext context = new InitContext( cfg );
+ Iterator<Class<?>> iter = cfg.getClassMappings();
+ DirectoryProviderFactory factory = new DirectoryProviderFactory();
+
+ initProgrammaticAnalyzers(context, reflectionManager);
+ initProgrammaticallyDefinedFilterDef(reflectionManager);
+
+ while ( iter.hasNext() ) {
+ Class mappedClass = iter.next();
+ if ( mappedClass == null ) {
+ continue;
+ }
+ @SuppressWarnings( "unchecked" )
+ XClass mappedXClass = reflectionManager.toXClass( mappedClass );
+ if ( mappedXClass == null ) {
+ continue;
+ }
+
+ if ( mappedXClass.isAnnotationPresent( Indexed.class ) ) {
+
+ if ( mappedXClass.isAbstract() ) {
+ log.warn( "Abstract classes can never insert index documents. Remove @Indexed." );
+ continue;
+ }
+
+ DirectoryProviderFactory.DirectoryProviders providers = factory.createDirectoryProviders(
+ mappedXClass, cfg, this, reflectionManager
+ );
+ //FIXME DocumentBuilderIndexedEntity needs to be built by a helper method receiving Class<T> to infer T properly
+ //XClass unfortunately is not (yet) genericized: TODO?
+ final DocumentBuilderIndexedEntity<?> documentBuilder = new DocumentBuilderIndexedEntity(
+ mappedXClass, context, providers.getProviders(), providers.getSelectionStrategy(),
+ reflectionManager
+ );
+
+ indexHierarchy.addIndexedClass( mappedClass );
+ documentBuildersIndexedEntities.put( mappedClass, documentBuilder );
+ }
+ else {
+ //FIXME DocumentBuilderIndexedEntity needs to be built by a helper method receiving Class<T> to infer T properly
+ //XClass unfortunately is not (yet) genericized: TODO?
+ final DocumentBuilderContainedEntity<?> documentBuilder = new DocumentBuilderContainedEntity(
+ mappedXClass, context, reflectionManager
+ );
+ //TODO enhance that, I don't like to expose EntityState
+ if ( documentBuilder.getEntityState() != EntityState.NON_INDEXABLE ) {
+ documentBuildersContainedEntities.put( mappedClass, documentBuilder );
+ }
+ }
+ bindFilterDefs( mappedXClass );
+			//TODO should analyzer defs for classes at the same level???
+ }
+ analyzers = context.initLazyAnalyzers();
+ factory.startDirectoryProviders();
+ }
+
+ private void initProgrammaticAnalyzers(InitContext context, ReflectionManager reflectionManager) {
+ final Map defaults = reflectionManager.getDefaults();
+
+ if (defaults != null) {
+ AnalyzerDef[] defs = (AnalyzerDef[]) defaults.get( AnalyzerDefs.class );
+ if ( defs != null ) {
+ for (AnalyzerDef def : defs) {
+ context.addAnalyzerDef( def );
+ }
+ }
+ }
+ }
+
+ private static FilterCachingStrategy buildFilterCachingStrategy(Properties properties) {
+ FilterCachingStrategy filterCachingStrategy;
+ String impl = properties.getProperty( Environment.FILTER_CACHING_STRATEGY );
+ if ( StringHelper.isEmpty( impl ) || "mru".equalsIgnoreCase( impl ) ) {
+ filterCachingStrategy = new MRUFilterCachingStrategy();
+ }
+ else {
+ filterCachingStrategy = PluginLoader.instanceFromName( FilterCachingStrategy.class,
+ impl, SearchFactoryImpl.class, "filterCachingStrategy" );
+ }
+ filterCachingStrategy.initialize( properties );
+ return filterCachingStrategy;
+ }
+
+ public FilterCachingStrategy getFilterCachingStrategy() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return filterCachingStrategy;
+ }
+
+ public FilterDef getFilterDefinition(String name) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return filterDefinitions.get( name );
+ }
+
+ private static class DirectoryProviderData {
+ public final ReentrantLock dirLock = new ReentrantLock();
+ public OptimizerStrategy optimizerStrategy;
+ public final Set<Class<?>> classes = new HashSet<Class<?>>( 2 );
+ public Similarity similarity = null;
+ private boolean exclusiveIndexUsage;
+ }
+
+ public ReentrantLock getDirectoryProviderLock(DirectoryProvider<?> dp) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return this.dirProviderData.get( dp ).dirLock;
+ }
+
+ public void addDirectoryProvider(DirectoryProvider<?> provider, boolean exclusiveIndexUsage) {
+ //no need to set a barrier we use this method in the init thread
+ DirectoryProviderData dirConfiguration = new DirectoryProviderData();
+ dirConfiguration.exclusiveIndexUsage = exclusiveIndexUsage;
+ this.dirProviderData.put( provider, dirConfiguration );
+ }
+
+ public int getFilterCacheBitResultsSize() {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return cacheBitResultsSize;
+ }
+
+ public Set<Class<?>> getIndexedTypesPolymorphic(Class<?>[] classes) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return indexHierarchy.getIndexedClasses( classes );
+ }
+
+ public BatchBackend makeBatchBackend(MassIndexerProgressMonitor progressMonitor) {
+ BatchBackend batchBackend;
+ String impl = configurationProperties.getProperty( Environment.BATCH_BACKEND );
+ if ( StringHelper.isEmpty( impl ) || "LuceneBatch".equalsIgnoreCase( impl ) ) {
+ batchBackend = new LuceneBatchBackend();
+ }
+ else {
+ batchBackend = PluginLoader.instanceFromName( BatchBackend.class, impl, SearchFactoryImpl.class,
+ "batchbackend" );
+ }
+ Properties batchBackendConfiguration = new MaskedProperty(
+ this.configurationProperties, Environment.BATCH_BACKEND );
+ batchBackend.initialize( batchBackendConfiguration, progressMonitor, this );
+ return batchBackend;
+ }
+
+ /**
+ * Helper class which keeps track of all super classes and interfaces of the indexed entities.
+ */
+ private static class PolymorphicIndexHierarchy {
+ private Map<Class<?>, Set<Class<?>>> classToIndexedClass;
+
+ PolymorphicIndexHierarchy() {
+ classToIndexedClass = new HashMap<Class<?>, Set<Class<?>>>();
+ }
+
+ void addIndexedClass(Class indexedClass) {
+ addClass( indexedClass, indexedClass );
+ Class superClass = indexedClass.getSuperclass();
+ while ( superClass != null ) {
+ addClass( superClass, indexedClass );
+ superClass = superClass.getSuperclass();
+ }
+ for ( Class clazz : indexedClass.getInterfaces() ) {
+ addClass( clazz, indexedClass );
+ }
+ }
+
+ private void addClass(Class superclass, Class indexedClass) {
+ Set<Class<?>> classesSet = classToIndexedClass.get( superclass );
+ if ( classesSet == null ) {
+ classesSet = new HashSet<Class<?>>();
+ classToIndexedClass.put( superclass, classesSet );
+ }
+ classesSet.add( indexedClass );
+ }
+
+ Set<Class<?>> getIndexedClasses(Class<?>[] classes) {
+			Set<Class<?>> indexedClasses = new HashSet<Class<?>>();
+			for ( Class clazz : classes ) {
+				Set<Class<?>> set = classToIndexedClass.get( clazz );
+				if ( set != null ) {
+					// at this point we don't have to care about including indexed subclasses of an indexed class:
+					// MultiClassesQueryLoader will take care of this later and optimise the queries
+					indexedClasses.addAll( set );
+				}
+			}
+			if ( log.isTraceEnabled() ) {
+				log.trace( "Targeted indexed classes for {}: {}", Arrays.toString( classes ), indexedClasses );
+			}
+			return indexedClasses;
+ }
+ }
+
+ public Similarity getSimilarity(DirectoryProvider<?> provider) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ Similarity similarity = dirProviderData.get( provider ).similarity;
+ if ( similarity == null ) throw new SearchException( "Assertion error: a similarity should be defined for each provider" );
+ return similarity;
+ }
+
+ public boolean isExclusiveIndexUsageEnabled(DirectoryProvider<?> provider) {
+ if ( barrier != 0 ) {
+ } //read barrier
+ return dirProviderData.get( provider ).exclusiveIndexUsage;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchFactoryImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
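
As a side note, the "piggyback" read-barrier idiom used throughout SearchFactoryImpl (the empty if ( barrier != 0 ) {} blocks) boils down to the following sketch; this only illustrates the Java memory model reasoning described in the comment above and is not code from the patch.

public class PublishedAfterInit {

    private int state;               // written only by the constructor thread
    private volatile short barrier;  // written last, read first

    public PublishedAfterInit() {
        this.state = 42;   // plain writes...
        this.barrier = 1;  // ...published by the volatile write (write barrier)
    }

    public int getState() {
        if ( barrier != 0 ) { }  // volatile read first: makes the plain writes above visible to this thread
        return state;
    }
}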
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchMappingBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchMappingBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchMappingBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,140 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.lang.reflect.Method;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.Environment;
+import org.hibernate.search.cfg.SearchMapping;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.annotations.common.util.ReflectHelper;
+
+import org.slf4j.Logger;
+
+/**
+ * Package-private class extracting the SearchMapping factory if needed.
+ */
+class SearchMappingBuilder {
+ private static final Logger LOG = LoggerFactory.make();
+
+ private SearchMappingBuilder() {
+ }
+
+ /**
+ * This factory method takes a SearchConfiguration object
+ * and returns a SearchMapping object which defines
+ * the programmatic model for indexing entities and fields.
+ *
+ * Throws SearchException:
+ * 1) No @Factory found
+ * 2) Multiple @Factory found
+ * 3) hibernate.search.model_mapping defines a class that cannot be found
+ * 4) Cannot invoke the @Factory method to get new instance of SearchMapping
+ *
+	 * @param cfg the SearchConfiguration
+ * @return SearchMapping
+ */
+ public static SearchMapping getSearchMapping(SearchConfiguration cfg) {
+
+ //try SearchConfiguration object first and then properties
+ Object modelMappingProperty = cfg.getProgrammaticMapping();
+ if ( modelMappingProperty == null) {
+ modelMappingProperty = cfg.getProperties().get( Environment.MODEL_MAPPING );
+ }
+
+ if ( modelMappingProperty == null) {
+ return null;
+ }
+ SearchMapping mapping = null;
+ Object programmaticConfig = modelMappingProperty;
+ if (programmaticConfig instanceof SearchMapping) {
+ mapping = (SearchMapping) programmaticConfig;
+ return mapping;
+ }
+ Class<?> clazz = getProgrammaticMappingClass(programmaticConfig);
+ Method[] methods = clazz.getDeclaredMethods();
+ int count = 0;
+ for (Method method : methods) {
+ if (method.isAnnotationPresent(Factory.class)) {
+ count++;
+ makeMethodAccessibleIfRequired(method);
+ mapping = getNewInstanceOfSearchMapping(clazz, method);
+ }
+ }
+ validateMappingFactoryDefinition(count, clazz);
+ return mapping;
+ }
+
+ private static SearchMapping getNewInstanceOfSearchMapping(Class<?> clazz, Method method) {
+ SearchMapping mapping = null;
+ try {
+ LOG.debug("invoking factory method [ {}.{} ] to get search mapping instance", clazz.getName(), method.getName());
+ Object instance = clazz.newInstance();
+ mapping = (SearchMapping) method.invoke(instance);
+ } catch (Exception e) {
+ throw new SearchException("Unable to call the factory method: " + clazz.getName() + "." + method.getName(), e);
+ }
+ return mapping;
+ }
+
+ private static void makeMethodAccessibleIfRequired(Method method) {
+ if ( !method.isAccessible() ) {
+ method.setAccessible( true );
+ }
+ }
+
+ private static void validateMappingFactoryDefinition(int count, Class<?> factory) {
+ if (count == 0) {
+ throw new SearchException("No @Factory method defined for building programmatic api on " + factory);
+ }
+ if (count > 1) {
+ throw new SearchException("Multiple @Factory methods defined. Only one factory method required. " + factory);
+ }
+ }
+
+ private static Class<?> getProgrammaticMappingClass(Object programmaticConfig) {
+ Class<?> clazz = null;
+ if (programmaticConfig instanceof String) {
+ final String className = ( String ) programmaticConfig;
+ try {
+ clazz = ReflectHelper.classForName( className, SearchMappingBuilder.class);
+ } catch (ClassNotFoundException e) {
+ throw new SearchException("Unable to find " + Environment.MODEL_MAPPING + "=" + className, e);
+ }
+ } else if (programmaticConfig instanceof Class){
+ clazz = (Class<?>) programmaticConfig;
+ }
+ else {
+ throw new SearchException(Environment.MODEL_MAPPING + " is of an unknown type: " + programmaticConfig.getClass() );
+ }
+ if (clazz == null) {
+ throw new SearchException("No programmatic factory defined");
+ }
+ return clazz;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SearchMappingBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
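
For illustration (not part of the patch), the kind of factory class this builder resolves from the hibernate.search.model_mapping property or from SearchConfiguration.getProgrammaticMapping(); the class name is hypothetical and the fluent entity/field mapping calls are omitted.

import org.hibernate.search.annotations.Factory;
import org.hibernate.search.cfg.SearchMapping;

// Hypothetical mapping factory: exactly one @Factory method returning a SearchMapping.
public class BookMappingFactory {

    @Factory
    public SearchMapping buildMapping() {
        SearchMapping mapping = new SearchMapping();
        // entity and field definitions would be added here via the SearchMapping fluent API
        return mapping;
    }
}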
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SimpleIndexingProgressMonitor.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SimpleIndexingProgressMonitor.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SimpleIndexingProgressMonitor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,80 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.hibernate.search.batchindexing.MassIndexerProgressMonitor;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * A very simple implementation of MassIndexerProgressMonitor
+ *
+ * @author Sanne Grinovero
+ */
+public class SimpleIndexingProgressMonitor implements MassIndexerProgressMonitor {
+
+ private static final Logger log = LoggerFactory.make();
+ private final AtomicLong documentsDoneCounter = new AtomicLong();
+ private final AtomicLong totalCounter = new AtomicLong();
+ private volatile long startTimeMs;
+
+ public void entitiesLoaded(int size) {
+ //not used
+ }
+
+ public void documentsAdded(long increment) {
+ long current = documentsDoneCounter.addAndGet( increment );
+ if ( current == increment ) {
+ startTimeMs = System.currentTimeMillis();
+ }
+ if ( current % getStatusMessagePeriod() == 0 ) {
+ printStatusMessage( startTimeMs, totalCounter.get(), current );
+ }
+ }
+
+ public void documentsBuilt(int number) {
+ //not used
+ }
+
+ public void addToTotalCount(long count) {
+ totalCounter.addAndGet( count );
+ log.info( "Going to reindex {} entities", count );
+ }
+
+ protected int getStatusMessagePeriod() {
+ return 50;
+ }
+
+ protected void printStatusMessage(long starttimems, long totalTodoCount, long doneCount) {
+ long elapsedMs = System.currentTimeMillis() - starttimems;
+ log.info( "{} documents indexed in {} ms", doneCount, elapsedMs );
+ float estimateSpeed = doneCount * 1000f / elapsedMs ;
+ float estimatePercentileComplete = doneCount * 100f / totalTodoCount ;
+ log.info( "Indexing speed: {} documents/second; progress: {}%", estimateSpeed, estimatePercentileComplete );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SimpleIndexingProgressMonitor.java
___________________________________________________________________
Name: svn:keywords
+ Id
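
Since getStatusMessagePeriod() and printStatusMessage(...) are protected, the logging cadence can be tuned by subclassing; a minimal sketch (the subclass name is illustrative):

import org.hibernate.search.impl.SimpleIndexingProgressMonitor;

// Reports progress every 1000 documents instead of every 50.
public class QuietIndexingProgressMonitor extends SimpleIndexingProgressMonitor {

    @Override
    protected int getStatusMessagePeriod() {
        return 1000;
    }
}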
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SolrAnalyzerBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SolrAnalyzerBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SolrAnalyzerBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,100 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.impl;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Collections;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.solr.analysis.TokenizerChain;
+import org.apache.solr.analysis.TokenFilterFactory;
+import org.apache.solr.analysis.TokenizerFactory;
+import org.apache.solr.util.plugin.ResourceLoaderAware;
+import org.apache.solr.common.ResourceLoader;
+
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.TokenizerDef;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.util.HibernateSearchResourceLoader;
+
+/**
+ * Instances of this class are used to build Lucene analyzers which are defined using the solr <code>TokenFilterFactory</code>.
+ * To keep the dependency on the Solr framework optional, only this class has direct dependencies on Solr. Solr dependencies
+ * are not supposed to be used anywhere else (except the actual configuration of the analyzers in the domain model).
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+class SolrAnalyzerBuilder {
+ private SolrAnalyzerBuilder() {}
+
+ /**
+ * Builds a Lucene <code>Analyzer</code> from the specified <code>AnalyzerDef</code> annotation.
+ *
+ * @param analyzerDef The <code>AnalyzerDef</code> annotation as found in the annotated domain class.
+ * @return a Lucene <code>Analyzer</code>
+ */
+ public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
+ TokenizerDef token = analyzerDef.tokenizer();
+ TokenizerFactory tokenFactory = ( TokenizerFactory ) instantiate( token.factory() );
+ tokenFactory.init( getMapOfParameters( token.params() ) );
+
+ final int length = analyzerDef.filters().length;
+ TokenFilterFactory[] filters = new TokenFilterFactory[length];
+ ResourceLoader resourceLoader = new HibernateSearchResourceLoader();
+ for ( int index = 0 ; index < length ; index++ ) {
+ TokenFilterDef filterDef = analyzerDef.filters()[index];
+ filters[index] = (TokenFilterFactory) instantiate( filterDef.factory() );
+ filters[index].init( getMapOfParameters( filterDef.params() ) );
+ if ( filters[index] instanceof ResourceLoaderAware ) {
+ ((ResourceLoaderAware)filters[index]).inform( resourceLoader );
+ }
+ }
+ return new TokenizerChain(tokenFactory, filters);
+ }
+
+ private static Object instantiate(Class clazz) {
+ try {
+ return clazz.newInstance();
+ }
+ catch (IllegalAccessException e) {
+ throw new SearchException( "Unable to instantiate class: " + clazz, e );
+ }
+ catch (InstantiationException e) {
+ throw new SearchException( "Unable to instantiate class: " + clazz, e );
+ }
+ }
+
+ private static Map<String, String> getMapOfParameters(Parameter[] params) {
+ Map<String, String> mapOfParams = new HashMap<String, String>( params.length );
+ for (Parameter param : params) {
+ mapOfParams.put( param.name(), param.value() );
+ }
+ return Collections.unmodifiableMap( mapOfParams );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/impl/SolrAnalyzerBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
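
For reference, the shape of @AnalyzerDef this builder consumes; the analyzer name and holder class are illustrative, the Solr factories shown are the standard ones shipped with solr-core, and in practice the annotation is usually placed on an @Indexed entity or a package.

import org.apache.solr.analysis.LowerCaseFilterFactory;
import org.apache.solr.analysis.StandardTokenizerFactory;
import org.hibernate.search.annotations.AnalyzerDef;
import org.hibernate.search.annotations.TokenFilterDef;
import org.hibernate.search.annotations.TokenizerDef;

// SolrAnalyzerBuilder.buildAnalyzer() turns this definition into a TokenizerChain:
// a standard tokenizer followed by a lower-case token filter.
@AnalyzerDef(name = "lowercaseAnalyzer",
        tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
        filters = @TokenFilterDef(factory = LowerCaseFilterFactory.class))
public class AnalyzerHolder {
}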
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextEntityManager.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextEntityManager.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextEntityManager.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,105 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.jpa;
+
+import java.io.Serializable;
+import javax.persistence.EntityManager;
+
+import org.hibernate.search.MassIndexer;
+import org.hibernate.search.SearchFactory;
+
+/**
+ * Extends an EntityManager with Full-Text operations
+ *
+ * @author Emmanuel Bernard
+ */
+public interface FullTextEntityManager extends EntityManager {
+
+ /**
+ * Create a fulltext query on top of a native Lucene query returning the matching objects
+ * of type <code>entities</code> and their respective subclasses.
+ *
+	 * @param luceneQuery The native Lucene query to be run against the Lucene index.
+ * @param entities List of classes for type filtering. The query result will only return entities of
+ * the specified types and their respective subtype. If no class is specified no type filtering will take place.
+ *
+	 * @return A <code>FullTextQuery</code> wrapping around the native Lucene query.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class<?>... entities);
+
+ /**
+ * Force the (re)indexing of a given <b>managed</b> object.
+ * Indexation is batched per transaction: if a transaction is active, the operation
+ * will not affect the index at least until commit.
+ *
+ * @param entity The entity to index - must not be <code>null</code>.
+ *
+ * @throws IllegalArgumentException if entity is null or not an @Indexed entity
+ */
+ <T> void index(T entity);
+
+ /**
+ * @return the <code>SearchFactory</code> instance.
+ */
+ SearchFactory getSearchFactory();
+
+ /**
+ * Remove the entity with the type <code>entityType</code> and the identifier <code>id</code> from the index.
+ * If <code>id == null</code> all indexed entities of this type and its indexed subclasses are deleted. In this
+ * case this method behaves like {@link #purgeAll(Class)}.
+ *
+ * @param entityType The type of the entity to delete.
+ * @param id The id of the entity to delete.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purge(Class<T> entityType, Serializable id);
+
+ /**
+	 * Remove all entities of a particular class and all its subclasses from the index.
+ *
+ * @param entityType The class of the entities to remove.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purgeAll(Class<T> entityType);
+
+ /**
+ * Flush all index changes forcing Hibernate Search to apply all changes to the index not waiting for the batch limit.
+ */
+ public void flushToIndexes();
+
+ /**
+ * Creates a MassIndexer to rebuild the indexes of some
+ * or all indexed entity types.
+ * Instances cannot be reused.
+ * @param types optionally restrict the operation to selected types
+	 * @return a new MassIndexer covering the given types
+ */
+ public MassIndexer createIndexer(Class<?>... types);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextEntityManager.java
___________________________________________________________________
Name: svn:keywords
+ Id
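
A usage sketch for the interface above (not part of the patch); the entity instance and identifier are assumptions, and as documented for index() the changes only reach the index on transaction commit.

import javax.persistence.EntityManager;

import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;

public class IndexMaintenanceExample {

    // Re-indexes one managed entity and removes a stale document from the index.
    public static void refreshIndex(EntityManager em, Object managedEntity,
                                    Class<?> staleType, Long staleId) {
        FullTextEntityManager ftem = Search.getFullTextEntityManager( em );
        ftem.index( managedEntity );
        ftem.purge( staleType, staleId );
    }
}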
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextQuery.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextQuery.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextQuery.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,126 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.jpa;
+
+import javax.persistence.Query;
+
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.Explanation;
+import org.hibernate.Criteria;
+import org.hibernate.transform.ResultTransformer;
+import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.FullTextFilter;
+
+/**
+ * The base interface for lucene powered searches.
+ * This extends the JPA Query interface
+ *
+ * @author Hardy Ferentschik
+ * @author Emmanuel Bernard
+ */
+//TODO return FullTextQuery rather than Query in useful chain methods
+public interface FullTextQuery extends Query, ProjectionConstants {
+
+ /**
+	 * Allows Lucene to sort the results. This is useful when you have
+ * additional sort requirements on top of the default lucene ranking.
+ * Without lucene sorting you would have to retrieve the full result set and
+ * order the hibernate objects.
+ *
+ * @param sort The lucene sort object.
+ * @return this for method chaining
+ */
+ FullTextQuery setSort(Sort sort);
+
+ /**
+ * Allows to use lucene filters.
+	 * Semi-deprecated? A preferred way is to use the @FullTextFilterDef approach.
+ *
+ * @param filter The lucene filter.
+ * @return this for method chaining
+ */
+ FullTextQuery setFilter(Filter filter);
+
+ /**
+ * Returns the number of hits for this search
+ *
+ * Caution:
+ * The number of results might be slightly different from
+ * <code>getResultList().size()</code> because getResultList()
+ * may be not in sync with the database at the time of query.
+ */
+ int getResultSize();
+
+ /**
+ * Defines the Database Query used to load the Lucene results.
+ * Useful to load a given object graph by refining the fetch modes
+ *
+	 * No projection (criteria.setProjection()) is allowed; the root entity must be the only returned type.
+	 * No where restriction can be defined either.
+ *
+ */
+ FullTextQuery setCriteriaQuery(Criteria criteria);
+
+ /**
+ * Defines the Lucene field names projected and returned in a query result
+	 * Each field is converted back to its object representation, an Object[] being returned for each "row"
+ * (similar to an HQL or a Criteria API projection).
+ *
+ * A projectable field must be stored in the Lucene index and use a {@link org.hibernate.search.bridge.TwoWayFieldBridge}
+ * Unless notified in their JavaDoc, all built-in bridges are two-way. All @DocumentId fields are projectable by design.
+ *
+ * If the projected field is not a projectable field, null is returned in the object[]
+ *
+ */
+ FullTextQuery setProjection(String... fields);
+
+ /**
+ * Enable a given filter by its name. Returns a FullTextFilter object that allows filter parameter injection
+ */
+ FullTextFilter enableFullTextFilter(String name);
+
+ /**
+ * Disable a given filter by its name
+ */
+ void disableFullTextFilter(String name);
+
+ /**
+ *
+	 * Defines a result transformer used during projection.
+ *
+ */
+ FullTextQuery setResultTransformer(ResultTransformer transformer);
+
+ /**
+ * Return the Lucene {@link org.apache.lucene.search.Explanation}
+ * object describing the score computation for the matching object/document
+ * in the current query
+ *
+	 * @param documentId Lucene Document id to be explained. This is NOT the object id.
+ * @return Lucene Explanation
+ */
+ Explanation explain(int documentId);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/FullTextQuery.java
___________________________________________________________________
Name: svn:keywords
+ Id
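
A query sketch against the interface above (not part of the patch); the "title" field name is an assumption and would need to be indexed un-tokenized for the sort shown here to behave as expected.

import java.util.List;
import javax.persistence.EntityManager;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.jpa.Search;

public class ProjectionQueryExample {

    // Runs a term query, sorts on a field and projects the entity id plus one stored field.
    public static List<?> titlesMatching(EntityManager em, Class<?> entityType, String value) {
        FullTextEntityManager ftem = Search.getFullTextEntityManager( em );
        org.apache.lucene.search.Query luceneQuery = new TermQuery( new Term( "title", value ) );
        FullTextQuery query = ftem.createFullTextQuery( luceneQuery, entityType );
        query.setSort( new Sort( new SortField( "title", SortField.STRING ) ) );
        query.setProjection( FullTextQuery.ID, "title" );
        return query.getResultList();
    }
}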
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/Search.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/Search.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/Search.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,62 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.jpa;
+
+import javax.persistence.EntityManager;
+
+import org.hibernate.search.jpa.impl.FullTextEntityManagerImpl;
+
+/**
+ * Helper class that should be used when building a FullTextEntityManager
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public final class Search {
+
+ private Search() {
+ }
+
+ /**
+	 * Build a full text capable EntityManager.
+	 * The underlying EM implementation has to be Hibernate EntityManager.
+ */
+ public static FullTextEntityManager getFullTextEntityManager(EntityManager em) {
+ if ( em instanceof FullTextEntityManagerImpl ) {
+ return (FullTextEntityManager) em;
+ }
+ else {
+ return new FullTextEntityManagerImpl(em);
+ }
+ }
+
+ /**
+ * @deprecated As of release 3.1.0, replaced by {@link #getFullTextEntityManager}
+ */
+ @Deprecated
+ public static FullTextEntityManager createFullTextEntityManager(EntityManager em) {
+ return getFullTextEntityManager(em);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/Search.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,293 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.jpa.impl;
+
+import java.io.Serializable;
+import java.util.Map;
+import javax.persistence.EntityManager;
+import javax.persistence.FlushModeType;
+import javax.persistence.LockModeType;
+import javax.persistence.Query;
+import javax.persistence.EntityTransaction;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.TypedQuery;
+import javax.persistence.metamodel.Metamodel;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.CriteriaBuilder;
+
+import org.hibernate.search.jpa.FullTextEntityManager;
+import org.hibernate.search.jpa.FullTextQuery;
+import org.hibernate.search.MassIndexer;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Session;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FullTextEntityManagerImpl implements FullTextEntityManager, Serializable {
+ private final EntityManager em;
+ private FullTextSession ftSession;
+
+ public FullTextEntityManagerImpl(EntityManager em) {
+ this.em = em;
+ }
+
+ private FullTextSession getFullTextSession() {
+ if ( ftSession == null ) {
+ Object delegate = em.getDelegate();
+ if ( delegate == null ) {
+ throw new SearchException(
+					"Trying to use Hibernate Search without a Hibernate EntityManager (no delegate)"
+ );
+ }
+ else if ( Session.class.isAssignableFrom( delegate.getClass() ) ) {
+ ftSession = Search.getFullTextSession( ( Session ) delegate );
+ }
+ else if ( EntityManager.class.isAssignableFrom( delegate.getClass() ) ) {
+ //Some app servers wrap the EM twice
+ delegate = ( ( EntityManager ) delegate ).getDelegate();
+ if ( delegate == null ) {
+ throw new SearchException(
+						"Trying to use Hibernate Search without a Hibernate EntityManager (no delegate)"
+ );
+ }
+ else if ( Session.class.isAssignableFrom( delegate.getClass() ) ) {
+ ftSession = Search.getFullTextSession( ( Session ) delegate );
+ }
+ else {
+ throw new SearchException(
+							"Trying to use Hibernate Search without a Hibernate EntityManager: " + delegate.getClass()
+ );
+ }
+ }
+ else {
+ throw new SearchException(
+						"Trying to use Hibernate Search without a Hibernate EntityManager: " + delegate.getClass()
+ );
+ }
+ }
+ return ftSession;
+ }
+
+ public FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class<?>... entities) {
+ FullTextSession ftSession = getFullTextSession();
+ return new FullTextQueryImpl( ftSession.createFullTextQuery( luceneQuery, entities ), ftSession );
+ }
+
+ public <T> void index(T entity) {
+ getFullTextSession().index( entity );
+ }
+
+ public SearchFactory getSearchFactory() {
+ return getFullTextSession().getSearchFactory();
+ }
+
+ public <T> void purge(Class<T> entityType, Serializable id) {
+ getFullTextSession().purge( entityType, id );
+ }
+
+ public <T> void purgeAll(Class<T> entityType) {
+ getFullTextSession().purgeAll( entityType );
+ }
+
+ public void flushToIndexes() {
+ getFullTextSession().flushToIndexes();
+ }
+
+ public void persist(Object entity) {
+ em.persist( entity );
+ }
+
+ public <T> T merge(T entity) {
+ return em.merge( entity );
+ }
+
+ public void remove(Object entity) {
+ em.remove( entity );
+ }
+
+ public <T> T find(Class<T> entityClass, Object primaryKey) {
+ return em.find( entityClass, primaryKey );
+ }
+
+ public <T> T find(Class<T> entityClass, Object primaryKey, Map<String, Object> hints) {
+ return em.find( entityClass, primaryKey, hints );
+ }
+
+ public <T> T find(Class<T> entityClass, Object primaryKey, LockModeType lockModeType) {
+ return em.find( entityClass, primaryKey, lockModeType );
+ }
+
+ public <T> T find(Class<T> entityClass, Object primaryKey, LockModeType lockModeType, Map<String, Object> hints) {
+ return em.find( entityClass, primaryKey, lockModeType, hints );
+ }
+
+ public <T> T getReference(Class<T> entityClass, Object primaryKey) {
+ return em.getReference( entityClass, primaryKey );
+ }
+
+ public void flush() {
+ em.flush();
+ }
+
+ public void setFlushMode(FlushModeType flushMode) {
+ em.setFlushMode( flushMode );
+ }
+
+ public FlushModeType getFlushMode() {
+ return em.getFlushMode();
+ }
+
+ public void lock(Object entity, LockModeType lockMode) {
+ em.lock( entity, lockMode );
+ }
+
+ public void lock(Object entity, LockModeType lockModeType, Map<String, Object> hints) {
+ em.lock( entity, lockModeType, hints );
+ }
+
+ public void refresh(Object entity) {
+ em.refresh( entity );
+ }
+
+ public void refresh(Object entity, Map<String, Object> hints) {
+ em.refresh( entity, hints );
+ }
+
+ public void refresh(Object entity, LockModeType lockModeType) {
+ em.refresh( entity, lockModeType );
+ }
+
+ public void refresh(Object entity, LockModeType lockModeType, Map<String, Object> hints) {
+ em.refresh( entity, lockModeType, hints );
+ }
+
+ public void clear() {
+ em.clear();
+ }
+
+ public void detach(Object entity) {
+ em.detach( entity );
+ }
+
+ public boolean contains(Object entity) {
+ return em.contains( entity );
+ }
+
+ public LockModeType getLockMode(Object entity) {
+ return em.getLockMode( entity );
+ }
+
+ public void setProperty(String key, Object value) {
+ em.setProperty( key, value );
+ }
+
+ public Map<String, Object> getProperties() {
+ return em.getProperties();
+ }
+
+ public Query createQuery(String ejbqlString) {
+ return em.createQuery( ejbqlString );
+ }
+
+ public <T> TypedQuery<T> createQuery(CriteriaQuery<T> criteriaQuery) {
+ return em.createQuery( criteriaQuery );
+ }
+
+ public <T> TypedQuery<T> createQuery(String qlString, Class<T> resultClass) {
+ return em.createQuery( qlString, resultClass );
+ }
+
+ public Query createNamedQuery(String name) {
+ return em.createNamedQuery( name );
+ }
+
+ public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) {
+ return em.createNamedQuery( name, resultClass );
+ }
+
+ public Query createNativeQuery(String sqlString) {
+ return em.createNativeQuery( sqlString );
+ }
+
+ public Query createNativeQuery(String sqlString, Class resultClass) {
+ return em.createNativeQuery( sqlString, resultClass );
+ }
+
+ public Query createNativeQuery(String sqlString, String resultSetMapping) {
+ return em.createNativeQuery( sqlString, resultSetMapping );
+ }
+
+ public void joinTransaction() {
+ em.joinTransaction();
+ }
+
+ public <T> T unwrap(Class<T> type) {
+ if ( type.equals( FullTextSession.class ) ) {
+ @SuppressWarnings("unchecked")
+ final T ftSession = ( T ) Search.getFullTextSession( em.unwrap( Session.class ) );
+ return ftSession;
+ }
+ else {
+ return em.unwrap( type );
+ }
+ }
+
+ public Object getDelegate() {
+ return em.getDelegate();
+ }
+
+ public void close() {
+ em.close();
+ }
+
+ public boolean isOpen() {
+ return em.isOpen();
+ }
+
+ public EntityTransaction getTransaction() {
+ return em.getTransaction();
+ }
+
+ public EntityManagerFactory getEntityManagerFactory() {
+ return em.getEntityManagerFactory();
+ }
+
+ public CriteriaBuilder getCriteriaBuilder() {
+ return em.getCriteriaBuilder();
+ }
+
+ public Metamodel getMetamodel() {
+ return em.getMetamodel();
+ }
+
+ public MassIndexer createIndexer(Class<?>... types) {
+ return getFullTextSession().createIndexer( types );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextEntityManagerImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
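
For reference, a minimal sketch of running a raw Lucene query through the JPA wrapper added above (not part of the commit; the field and keyword values are caller-supplied examples, and the query targets all indexed entities since no entity classes are passed):

import java.util.List;
import javax.persistence.EntityManager;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;

import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.jpa.Search;

public class FullTextEntityManagerSketch {
	// Runs a Lucene TermQuery against all indexed entities; entity classes could be
	// passed as extra arguments to createFullTextQuery to restrict the targets.
	public static List search(EntityManager em, String field, String keyword) {
		FullTextEntityManager ftem = Search.getFullTextEntityManager( em );
		TermQuery luceneQuery = new TermQuery( new Term( field, keyword ) );
		FullTextQuery query = ftem.createFullTextQuery( luceneQuery );
		return query.getResultList();
	}
}
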
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,404 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.jpa.impl;
+
+import java.io.Serializable;
+import java.util.*;
+import javax.persistence.EntityExistsException;
+import javax.persistence.EntityNotFoundException;
+import javax.persistence.FlushModeType;
+import javax.persistence.NoResultException;
+import javax.persistence.NonUniqueResultException;
+import javax.persistence.OptimisticLockException;
+import javax.persistence.PersistenceException;
+import javax.persistence.Query;
+import javax.persistence.TemporalType;
+import javax.persistence.LockModeType;
+import javax.persistence.Parameter;
+
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.Explanation;
+
+import org.hibernate.Criteria;
+import org.hibernate.FlushMode;
+import org.hibernate.HibernateException;
+import org.hibernate.ObjectNotFoundException;
+import org.hibernate.QueryException;
+import org.hibernate.Session;
+import org.hibernate.StaleObjectStateException;
+import org.hibernate.StaleStateException;
+import org.hibernate.TransientObjectException;
+import org.hibernate.TypeMismatchException;
+import org.hibernate.UnresolvableObjectException;
+import org.hibernate.exception.ConstraintViolationException;
+import org.hibernate.hql.QueryExecutionRequestException;
+import org.hibernate.search.FullTextFilter;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.jpa.FullTextQuery;
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * Implements the JPA 2 query interface and delegates the calls to
+ * a Hibernate Core FullTextQuery.
+ * This has the consequence of "duplicating" the JPA 2 query logic in some areas.
+ *
+ * @author Emmanuel Bernard
+ */
+public class FullTextQueryImpl implements FullTextQuery {
+ private final org.hibernate.search.FullTextQuery query;
+ private final Session session;
+ private Integer firstResult;
+ private Integer maxResults;
+ //initialized at 0 since we don't expect to use hints at this stage
+ private final Map<String, Object> hints = new HashMap<String, Object>( 0 );
+ private FlushModeType jpaFlushMode;
+
+ public FullTextQueryImpl(org.hibernate.search.FullTextQuery query, Session session) {
+ this.query = query;
+ this.session = session;
+ }
+
+ public FullTextQuery setSort(Sort sort) {
+ query.setSort( sort );
+ return this;
+ }
+
+ public FullTextQuery setFilter(Filter filter) {
+ query.setFilter( filter );
+ return this;
+ }
+
+ public int getResultSize() {
+ return query.getResultSize();
+ }
+
+ public FullTextQuery setCriteriaQuery(Criteria criteria) {
+ query.setCriteriaQuery( criteria );
+ return this;
+ }
+
+ public FullTextQuery setProjection(String... fields) {
+ query.setProjection( fields );
+ return this;
+ }
+
+ public FullTextFilter enableFullTextFilter(String name) {
+ return query.enableFullTextFilter( name );
+ }
+
+ public void disableFullTextFilter(String name) {
+ query.disableFullTextFilter( name );
+ }
+
+ public FullTextQuery setResultTransformer(ResultTransformer transformer) {
+ query.setResultTransformer( transformer );
+ return this;
+ }
+
+ public List getResultList() {
+ try {
+ return query.list();
+ }
+ catch ( QueryExecutionRequestException he ) {
+			//TODO when should an illegal state exception be raised?
+ throw new IllegalStateException( he );
+ }
+ catch ( TypeMismatchException e ) {
+			//TODO when should an illegal argument exception be raised?
+ throw new IllegalArgumentException( e );
+ }
+ catch ( SearchException he ) {
+ throwPersistenceException( he );
+ throw he;
+ }
+ }
+
+	//TODO mutualize this code with the EM; this will fix the rollback issues
+
+ @SuppressWarnings({ "ThrowableInstanceNeverThrown" })
+ private void throwPersistenceException(Exception e) {
+ if ( e instanceof StaleStateException ) {
+ PersistenceException pe = wrapStaleStateException( ( StaleStateException ) e );
+ throwPersistenceException( pe );
+ }
+ else if ( e instanceof ConstraintViolationException ) {
+			//FIXME this is bad because ConstraintViolationException also happens in other circumstances
+ throwPersistenceException( new EntityExistsException( e ) );
+ }
+ else if ( e instanceof ObjectNotFoundException ) {
+ throwPersistenceException( new EntityNotFoundException( e.getMessage() ) );
+ }
+ else if ( e instanceof org.hibernate.NonUniqueResultException ) {
+ throwPersistenceException( new NonUniqueResultException( e.getMessage() ) );
+ }
+ else if ( e instanceof UnresolvableObjectException ) {
+ throwPersistenceException( new EntityNotFoundException( e.getMessage() ) );
+ }
+ else if ( e instanceof QueryException ) {
+ throw new IllegalArgumentException( e );
+ }
+ else if ( e instanceof TransientObjectException ) {
+ //FIXME rollback
+ throw new IllegalStateException( e ); //Spec 3.2.3 Synchronization rules
+ }
+ else {
+ throwPersistenceException( new PersistenceException( e ) );
+ }
+ }
+
+ void throwPersistenceException(PersistenceException e) {
+ if ( !( e instanceof NoResultException || e instanceof NonUniqueResultException ) ) {
+ //FIXME rollback
+ }
+ throw e;
+ }
+
+ @SuppressWarnings({ "ThrowableInstanceNeverThrown" })
+ PersistenceException wrapStaleStateException(StaleStateException e) {
+ PersistenceException pe;
+ if ( e instanceof StaleObjectStateException ) {
+ StaleObjectStateException sose = ( StaleObjectStateException ) e;
+ Serializable identifier = sose.getIdentifier();
+ if ( identifier != null ) {
+ Object entity = session.load( sose.getEntityName(), identifier );
+ if ( entity instanceof Serializable ) {
+ //avoid some user errors regarding boundary crossing
+ pe = new OptimisticLockException( null, e, entity );
+ }
+ else {
+ pe = new OptimisticLockException( e );
+ }
+ }
+ else {
+ pe = new OptimisticLockException( e );
+ }
+ }
+ else {
+ pe = new OptimisticLockException( e );
+ }
+ return pe;
+ }
+
+ @SuppressWarnings({ "ThrowableInstanceNeverThrown" })
+ public Object getSingleResult() {
+ try {
+ List result = query.list();
+ if ( result.size() == 0 ) {
+ throwPersistenceException( new NoResultException( "No entity found for query" ) );
+ }
+ else if ( result.size() > 1 ) {
+ Set uniqueResult = new HashSet( result );
+ if ( uniqueResult.size() > 1 ) {
+ throwPersistenceException( new NonUniqueResultException( "result returns " + uniqueResult.size() + " elements" ) );
+ }
+ else {
+ return uniqueResult.iterator().next();
+ }
+
+ }
+ else {
+ return result.get( 0 );
+ }
+ return null; //should never happen
+ }
+ catch ( QueryExecutionRequestException he ) {
+ throw new IllegalStateException( he );
+ }
+ catch ( TypeMismatchException e ) {
+ throw new IllegalArgumentException( e );
+ }
+ catch ( HibernateException he ) {
+ throwPersistenceException( he );
+ return null;
+ }
+ }
+
+ public Query setMaxResults(int maxResults) {
+ if ( maxResults < 0 ) {
+ throw new IllegalArgumentException(
+ "Negative ("
+ + maxResults
+ + ") parameter passed in to setMaxResults"
+ );
+ }
+ query.setMaxResults( maxResults );
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public int getMaxResults() {
+ return maxResults == null || maxResults == -1
+ ? Integer.MAX_VALUE
+ : maxResults;
+ }
+
+ public Query setFirstResult(int firstResult) {
+ if ( firstResult < 0 ) {
+ throw new IllegalArgumentException(
+ "Negative ("
+ + firstResult
+ + ") parameter passed in to setFirstResult"
+ );
+ }
+ query.setFirstResult( firstResult );
+ this.firstResult = firstResult;
+ return this;
+ }
+
+ public int getFirstResult() {
+ return firstResult == null ? 0 : firstResult;
+ }
+
+ public Explanation explain(int documentId) {
+ return query.explain( documentId );
+ }
+
+ public int executeUpdate() {
+ throw new IllegalStateException( "Update not allowed in FullTextQueries" );
+ }
+
+ public Query setHint(String hintName, Object value) {
+ hints.put( hintName, value );
+ return this;
+ }
+
+ public Map<String, Object> getHints() {
+ return hints;
+ }
+
+ public <T> Query setParameter(Parameter<T> tParameter, T t) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(Parameter<Calendar> calendarParameter, Calendar calendar, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(Parameter<Date> dateParameter, Date date, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(String name, Object value) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(String name, Date value, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(String name, Calendar value, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(int position, Object value) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setParameter(int position, Date value, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Set<Parameter<?>> getParameters() {
+ return Collections.EMPTY_SET;
+ }
+
+ public Query setParameter(int position, Calendar value, TemporalType temporalType) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Parameter<?> getParameter(String name) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Parameter<?> getParameter(int position) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public <T> Parameter<T> getParameter(String name, Class<T> type) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public <T> Parameter<T> getParameter(int position, Class<T> type) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public boolean isBound(Parameter<?> param) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public <T> T getParameterValue(Parameter<T> param) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Object getParameterValue(String name) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Object getParameterValue(int position) {
+ throw new UnsupportedOperationException( "parameters not supported in fullText queries" );
+ }
+
+ public Query setFlushMode(FlushModeType flushMode) {
+ this.jpaFlushMode = flushMode;
+ if ( flushMode == FlushModeType.AUTO ) {
+ query.setFlushMode( FlushMode.AUTO );
+ }
+ else if ( flushMode == FlushModeType.COMMIT ) {
+ query.setFlushMode( FlushMode.COMMIT );
+ }
+ return this;
+ }
+
+ public FlushModeType getFlushMode() {
+ if ( jpaFlushMode != null ) {
+ return jpaFlushMode;
+ }
+ final FlushMode hibernateFlushMode = session.getFlushMode();
+ if ( FlushMode.AUTO == hibernateFlushMode ) {
+ return FlushModeType.AUTO;
+ }
+ else if ( FlushMode.COMMIT == hibernateFlushMode ) {
+ return FlushModeType.COMMIT;
+ }
+ else {
+ return null; //incompatible flush mode
+ }
+ }
+
+ public Query setLockMode(LockModeType lockModeType) {
+ throw new UnsupportedOperationException( "lock modes not supported in fullText queries" );
+ }
+
+ public LockModeType getLockMode() {
+ throw new UnsupportedOperationException( "lock modes not supported in fullText queries" );
+ }
+
+ public <T> T unwrap(Class<T> type) {
+ //I've purposely decided not to return the underlying Hibernate FullTextQuery
+ //as I see this as an implementation detail that should not be exposed.
+ return query.unwrap( type );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/jpa/impl/FullTextQueryImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
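
For reference, a minimal pagination sketch against the JPA-level query implemented above (not part of the commit; field and keyword are caller-supplied examples). setFirstResult and setMaxResults are forwarded to the underlying Hibernate Search query, and negative values are rejected with IllegalArgumentException:

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;

import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;

public class PaginationSketch {
	// Fetches the second page of ten hits; both values are delegated to the
	// underlying org.hibernate.search.FullTextQuery.
	public static List secondPage(FullTextEntityManager ftem, String field, String keyword) {
		FullTextQuery query = ftem.createFullTextQuery( new TermQuery( new Term( field, keyword ) ) );
		query.setFirstResult( 10 );
		query.setMaxResults( 10 );
		return query.getResultList();
	}
}
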
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextFilterImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextFilterImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextFilterImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hibernate.search.FullTextFilter;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FullTextFilterImpl implements FullTextFilterImplementor {
+ private final Map<String, Object> parameters = new HashMap<String, Object>();
+ private String name;
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public FullTextFilter setParameter(String name, Object value) {
+ parameters.put( name, value );
+ return this;
+ }
+
+ public Object getParameter(String name) {
+ return parameters.get( name );
+ }
+
+ public Map<String, Object> getParameters() {
+ return parameters;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextFilterImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
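
For reference, a minimal sketch of the filter API backed by the FullTextFilterImpl added above (not part of the commit; the filter name "security" and its "level" parameter are hypothetical and would have to be declared with @FullTextFilterDef on an indexed entity):

import org.hibernate.search.FullTextQuery;

public class FilterSketch {
	// Enables a named filter and injects a parameter; the name/parameter pair is kept in
	// FullTextFilterImpl until the query builds the actual Lucene Filter.
	public static void applySecurityFilter(FullTextQuery query) {
		query.enableFullTextFilter( "security" ).setParameter( "level", 5 );
	}
}
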
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextQueryImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextQueryImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,966 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.slf4j.Logger;
+
+import org.hibernate.Criteria;
+import org.hibernate.HibernateException;
+import org.hibernate.LockMode;
+import org.hibernate.Query;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Session;
+import org.hibernate.LockOptions;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.engine.query.ParameterMetadata;
+import org.hibernate.impl.AbstractQueryImpl;
+import org.hibernate.impl.CriteriaImpl;
+import org.hibernate.search.FullTextFilter;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.DocumentExtractor;
+import org.hibernate.search.engine.EntityInfo;
+import org.hibernate.search.engine.FilterDef;
+import org.hibernate.search.engine.Loader;
+import org.hibernate.search.engine.MultiClassesQueryLoader;
+import org.hibernate.search.engine.ProjectionLoader;
+import org.hibernate.search.engine.QueryLoader;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.filter.ChainedFilter;
+import org.hibernate.search.filter.FilterKey;
+import org.hibernate.search.filter.StandardFilterKey;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+import org.hibernate.search.filter.ShardSensitiveOnlyFilter;
+import org.hibernate.search.reader.ReaderProvider;
+
+import static org.hibernate.search.reader.ReaderProviderHelper.getIndexReaders;
+
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.util.ContextHelper;
+
+import static org.hibernate.search.util.FilterCacheModeTypeHelper.cacheInstance;
+import static org.hibernate.search.util.FilterCacheModeTypeHelper.cacheResults;
+
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.transform.ResultTransformer;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * Implementation of {@link org.hibernate.search.FullTextQuery}.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ * @todo Implement setParameter()
+ */
+public class FullTextQueryImpl extends AbstractQueryImpl implements FullTextQuery {
+ private static final Logger log = LoggerFactory.make();
+ private final org.apache.lucene.search.Query luceneQuery;
+ private Set<Class<?>> indexedTargetedEntities;
+ private List<Class<?>> targetedEntities;
+ private Set<Class<?>> classesAndSubclasses;
+ //optimization: if we can avoid the filter clause (we can most of the time) do it as it has a significant perf impact
+ private boolean needClassFilterClause;
+ private Integer firstResult;
+ private Integer maxResults;
+ private Integer resultSize;
+ private Sort sort;
+ private Filter filter;
+ private Filter userFilter;
+ private Criteria criteria;
+ private String[] indexProjection;
+ private Set<String> idFieldNames;
+ private boolean allowFieldSelectionInProjection = true;
+ private ResultTransformer resultTransformer;
+ private SearchFactoryImplementor searchFactoryImplementor;
+ private final Map<String, FullTextFilterImpl> filterDefinitions = new HashMap<String, FullTextFilterImpl>();
+ private int fetchSize = 1;
+ private static final FullTextFilterImplementor[] EMPTY_FULL_TEXT_FILTER_IMPLEMENTOR = new FullTextFilterImplementor[0];
+
+
+ /**
+ * Constructs a <code>FullTextQueryImpl</code> instance.
+ *
+ * @param query The Lucene query.
+ * @param classes Array of classes (must be immutable) used to filter the results to the given class types.
+ * @param session Access to the Hibernate session.
+ * @param parameterMetadata Additional query metadata.
+ */
+ public FullTextQueryImpl(org.apache.lucene.search.Query query, Class<?>[] classes, SessionImplementor session,
+ ParameterMetadata parameterMetadata) {
+ //TODO handle flushMode
+ super( query.toString(), null, session, parameterMetadata );
+ this.luceneQuery = query;
+ this.targetedEntities = Arrays.asList( classes );
+ searchFactoryImplementor = getSearchFactoryImplementor();
+ this.indexedTargetedEntities = searchFactoryImplementor.getIndexedTypesPolymorphic( classes );
+ if ( classes != null && classes.length > 0 && indexedTargetedEntities.size() == 0 ) {
+ String msg = "None of the specified entity types or any of their subclasses are indexed.";
+ throw new IllegalArgumentException( msg );
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public FullTextQuery setSort(Sort sort) {
+ this.sort = sort;
+ return this;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public FullTextQuery setFilter(Filter filter) {
+ this.userFilter = filter;
+ return this;
+ }
+
+ /**
+ * Return an iterator on the results.
+	 * Retrieve the objects one by one (each is initialized during the next() operation)
+ */
+ public Iterator iterate() throws HibernateException {
+		//implement an iterator which keeps the id/class for each hit and gets the object on demand
+		//because I can't keep the searcher, and hence the hits, open. I don't have any hook to know when the
+ //user stops using it
+ //scrollable is better in this area
+
+ //find the directories
+ IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
+ if ( searcher == null ) {
+ return new IteratorImpl( Collections.EMPTY_LIST, noLoader );
+ }
+ try {
+ QueryHits queryHits = getQueryHits( searcher, calculateTopDocsRetrievalSize() );
+ int first = first();
+ int max = max( first, queryHits.totalHits );
+
+ int size = max - first + 1 < 0 ? 0 : max - first + 1;
+ List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactoryImplementor, indexProjection, idFieldNames, allowFieldSelectionInProjection
+ );
+ for ( int index = first; index <= max; index++ ) {
+ infos.add( extractor.extract( index ) );
+ }
+ Loader loader = getLoader();
+ return new IteratorImpl( infos, loader );
+ }
+ catch ( IOException e ) {
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ finally {
+ try {
+ closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
+ }
+ catch ( SearchException e ) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ }
+
+ /**
+ * Decide which object loader to use depending on the targeted entities. If there is only a single entity targeted
+ * a <code>QueryLoader</code> can be used which will only execute a single query to load the entities. If more than
+ * one entity is targeted a <code>MultiClassesQueryLoader</code> must be used. We also have to consider whether
+ * projections or <code>Criteria</code> are used.
+ *
+ * @return The loader instance to use to load the results of the query.
+ */
+ private Loader getLoader() {
+ Loader loader;
+ if ( indexProjection != null ) {
+ loader = getProjectionLoader();
+ }
+ else if ( criteria != null ) {
+ loader = getCriteriaLoader();
+ }
+ else if ( targetedEntities.size() == 1 ) {
+ loader = getSingleEntityLoader();
+ }
+ else {
+ loader = getMultipleEntitiesLoader();
+ }
+ return loader;
+ }
+
+ private Loader getMultipleEntitiesLoader() {
+ final MultiClassesQueryLoader multiClassesLoader = new MultiClassesQueryLoader();
+ multiClassesLoader.init( ( Session ) session, searchFactoryImplementor );
+ multiClassesLoader.setEntityTypes( indexedTargetedEntities );
+ return multiClassesLoader;
+ }
+
+ private Loader getSingleEntityLoader() {
+ final QueryLoader queryLoader = new QueryLoader();
+ queryLoader.init( ( Session ) session, searchFactoryImplementor );
+ queryLoader.setEntityType( targetedEntities.iterator().next() );
+ return queryLoader;
+ }
+
+ private Loader getCriteriaLoader() {
+ if ( targetedEntities.size() > 1 ) {
+ throw new SearchException( "Cannot mix criteria and multiple entity types" );
+ }
+ Class entityType = targetedEntities.size() == 0 ? null : targetedEntities.iterator().next();
+ if ( criteria instanceof CriteriaImpl ) {
+ String targetEntity = ( ( CriteriaImpl ) criteria ).getEntityOrClassName();
+ if ( entityType != null && !entityType.getName().equals( targetEntity ) ) {
+ throw new SearchException( "Criteria query entity should match query entity" );
+ }
+ else {
+ try {
+ entityType = ReflectHelper.classForName( targetEntity );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException( "Unable to load entity class from criteria: " + targetEntity, e );
+ }
+ }
+ }
+ QueryLoader queryLoader = new QueryLoader();
+ queryLoader.init( ( Session ) session, searchFactoryImplementor );
+ queryLoader.setEntityType( entityType );
+ queryLoader.setCriteria( criteria );
+ return queryLoader;
+ }
+
+ private Loader getProjectionLoader() {
+ ProjectionLoader loader = new ProjectionLoader();
+ loader.init( ( Session ) session, searchFactoryImplementor, resultTransformer, indexProjection );
+ loader.setEntityTypes( indexedTargetedEntities );
+ return loader;
+ }
+
+ public ScrollableResults scroll() throws HibernateException {
+ //keep the searcher open until the resultset is closed
+
+ //find the directories
+ IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
+ //FIXME: handle null searcher
+ try {
+ QueryHits queryHits = getQueryHits( searcher, calculateTopDocsRetrievalSize() );
+ int first = first();
+ int max = max( first, queryHits.totalHits );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactoryImplementor, indexProjection, idFieldNames, allowFieldSelectionInProjection
+ );
+ Loader loader = getLoader();
+ return new ScrollableResultsImpl(
+ searcher, first, max, fetchSize, extractor, loader, searchFactoryImplementor, this.session
+ );
+ }
+ catch ( IOException e ) {
+ //close only in case of exception
+ try {
+ closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
+ }
+ catch ( SearchException ee ) {
+ //we have the initial issue already
+ }
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ }
+
+ public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {
+ //TODO think about this scrollmode
+ return scroll();
+ }
+
+ public List list() throws HibernateException {
+ //find the directories
+ IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
+ if ( searcher == null ) {
+ return Collections.EMPTY_LIST;
+ }
+ try {
+ QueryHits queryHits = getQueryHits( searcher, calculateTopDocsRetrievalSize() );
+ int first = first();
+ int max = max( first, queryHits.totalHits );
+
+ int size = max - first + 1 < 0 ? 0 : max - first + 1;
+ List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
+ DocumentExtractor extractor = new DocumentExtractor(
+ queryHits, searchFactoryImplementor, indexProjection, idFieldNames, allowFieldSelectionInProjection
+ );
+ for ( int index = first; index <= max; index++ ) {
+ infos.add( extractor.extract( index ) );
+ }
+ Loader loader = getLoader();
+ List list = loader.load( infos.toArray( new EntityInfo[infos.size()] ) );
+ if ( resultTransformer == null || loader instanceof ProjectionLoader ) {
+ //stay consistent with transformTuple which can only be executed during a projection
+ return list;
+ }
+ else {
+ return resultTransformer.transformList( list );
+ }
+ }
+ catch ( IOException e ) {
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ finally {
+ try {
+ closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
+ }
+ catch ( SearchException e ) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ }
+
+ public Explanation explain(int documentId) {
+ Explanation explanation = null;
+ Searcher searcher = buildSearcher( searchFactoryImplementor );
+ if ( searcher == null ) {
+ throw new SearchException(
+ "Unable to build explanation for document id:"
+ + documentId + ". no index found"
+ );
+ }
+ try {
+ org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
+ buildFilters();
+ explanation = searcher.explain( query, documentId );
+ }
+ catch ( IOException e ) {
+ throw new HibernateException( "Unable to query Lucene index and build explanation", e );
+ }
+ finally {
+ //searcher cannot be null
+ try {
+ closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
+ }
+ catch ( SearchException e ) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ return explanation;
+ }
+
+ /**
+ * Execute the lucene search and return the matching hits.
+ *
+ * @param searcher The index searcher.
+	 * @param n Number of documents to retrieve
+ *
+ * @return An instance of <code>QueryHits</code> wrapping the Lucene query and the matching documents.
+ *
+ * @throws IOException in case there is an error executing the lucene search.
+ */
+ private QueryHits getQueryHits(Searcher searcher, Integer n) throws IOException {
+ org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
+ buildFilters();
+ QueryHits queryHits;
+ if ( n == null ) { // try to make sure that we get the right amount of top docs
+ queryHits = new QueryHits( searcher, query, filter, sort );
+ }
+ else {
+ queryHits = new QueryHits( searcher, query, filter, sort, n );
+ }
+ resultSize = queryHits.totalHits;
+ return queryHits;
+ }
+
+ /**
+ * @return Calculates the number of <code>TopDocs</code> which should be retrieved as part of the query. If Hibernate's
+	 * pagination parameters are set, the returned value is <code>first + maxResults</code>. Otherwise <code>null</code> is
+ * returned.
+ */
+ private Integer calculateTopDocsRetrievalSize() {
+ if ( maxResults == null ) {
+ return null;
+ }
+ else {
+ long tmpMaxResult = ( long ) first() + maxResults;
+ if ( tmpMaxResult >= Integer.MAX_VALUE ) {
+ // don't return just Integer.MAX_VALUE due to a bug in Lucene - see HSEARCH-330
+ return Integer.MAX_VALUE - 1;
+ }
+ else {
+ return ( int ) tmpMaxResult;
+ }
+ }
+ }
+
+ private void buildFilters() {
+ ChainedFilter chainedFilter = null;
+ if ( !filterDefinitions.isEmpty() ) {
+ chainedFilter = new ChainedFilter();
+ for ( FullTextFilterImpl fullTextFilter : filterDefinitions.values() ) {
+ Filter filter = buildLuceneFilter( fullTextFilter );
+ if ( filter != null ) {
+ chainedFilter.addFilter( filter );
+ }
+ }
+ }
+
+ if ( userFilter != null ) {
+ //chainedFilter is not always necessary here but the code is easier to read
+ if ( chainedFilter == null ) {
+ chainedFilter = new ChainedFilter();
+ }
+ chainedFilter.addFilter( userFilter );
+ }
+
+ if ( chainedFilter == null || chainedFilter.isEmpty() ) {
+ filter = null;
+ }
+ else {
+ filter = chainedFilter;
+ }
+ }
+
+ /**
+ * Builds a Lucene filter using the given <code>FullTextFilter</code>.
+ *
+ * @param fullTextFilter the Hibernate specific <code>FullTextFilter</code> used to create the
+ * Lucene <code>Filter</code>.
+ *
+ * @return the Lucene filter mapped to the filter definition
+ */
+ private Filter buildLuceneFilter(FullTextFilterImpl fullTextFilter) {
+
+ /*
+	 * FilterKey implementations and Filter(Factory) do not have to be thread-safe with respect to their parameter injection,
+	 * as FilterCachingStrategy ensures a memory barrier between concurrent thread calls
+ */
+ FilterDef def = searchFactoryImplementor.getFilterDefinition( fullTextFilter.getName() );
+		//def can never be null, it's guarded by enableFullTextFilter(String)
+
+ if ( isPreQueryFilterOnly( def ) ) {
+ return null;
+ }
+
+ Object instance = createFilterInstance( fullTextFilter, def );
+ FilterKey key = createFilterKey( def, instance );
+
+ // try to get the filter out of the cache
+ Filter filter = cacheInstance( def.getCacheMode() ) ?
+ searchFactoryImplementor.getFilterCachingStrategy().getCachedFilter( key ) :
+ null;
+
+ if ( filter == null ) {
+ filter = createFilter( def, instance );
+
+ // add filter to cache if we have to
+ if ( cacheInstance( def.getCacheMode() ) ) {
+ searchFactoryImplementor.getFilterCachingStrategy().addCachedFilter( key, filter );
+ }
+ }
+ return filter;
+ }
+
+ private boolean isPreQueryFilterOnly(FilterDef def) {
+ return def.getImpl().equals( ShardSensitiveOnlyFilter.class );
+ }
+
+ private Filter createFilter(FilterDef def, Object instance) {
+ Filter filter;
+ if ( def.getFactoryMethod() != null ) {
+ try {
+ filter = ( Filter ) def.getFactoryMethod().invoke( instance );
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException(
+ "Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
+ }
+ catch ( InvocationTargetException e ) {
+ throw new SearchException(
+ "Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
+ }
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "@Key method does not return a org.apache.lucene.search.Filter class: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
+ }
+ }
+ else {
+ try {
+ filter = ( Filter ) instance;
+ }
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "Filter implementation does not implement the Filter interface: "
+ + def.getImpl().getName() + ". "
+ + ( def.getFactoryMethod() != null ? def.getFactoryMethod().getName() : "" ), e
+ );
+ }
+ }
+
+ filter = addCachingWrapperFilter( filter, def );
+ return filter;
+ }
+
+ /**
+	 * Decides whether to wrap the given filter in a <code>CachingWrapperFilter</code>.
+ *
+	 * @param filter the filter which may get wrapped.
+ * @param def The filter definition used to decide whether wrapping should occur or not.
+ *
+ * @return The original filter or wrapped filter depending on the information extracted from
+ * <code>def</code>.
+ */
+ private Filter addCachingWrapperFilter(Filter filter, FilterDef def) {
+ if ( cacheResults( def.getCacheMode() ) ) {
+ int cachingWrapperFilterSize = searchFactoryImplementor.getFilterCacheBitResultsSize();
+ filter = new org.hibernate.search.filter.CachingWrapperFilter( filter, cachingWrapperFilterSize );
+ }
+
+ return filter;
+ }
+
+ private FilterKey createFilterKey(FilterDef def, Object instance) {
+ FilterKey key = null;
+ if ( !cacheInstance( def.getCacheMode() ) ) {
+ return key; // if the filter is not cached there is no key!
+ }
+
+ if ( def.getKeyMethod() == null ) {
+ key = new FilterKey() {
+ public int hashCode() {
+ return getImpl().hashCode();
+ }
+
+ public boolean equals(Object obj) {
+ if ( !( obj instanceof FilterKey ) ) {
+ return false;
+ }
+ FilterKey that = ( FilterKey ) obj;
+ return this.getImpl().equals( that.getImpl() );
+ }
+ };
+ }
+ else {
+ try {
+ key = ( FilterKey ) def.getKeyMethod().invoke( instance );
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException(
+ "Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
+ }
+ catch ( InvocationTargetException e ) {
+ throw new SearchException(
+ "Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
+ }
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "@Key method does not return FilterKey: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
+ }
+ }
+ key.setImpl( def.getImpl() );
+
+ //Make sure Filters are isolated by filter def name
+ StandardFilterKey wrapperKey = new StandardFilterKey();
+ wrapperKey.addParameter( def.getName() );
+ wrapperKey.addParameter( key );
+ return wrapperKey;
+ }
+
+ private Object createFilterInstance(FullTextFilterImpl fullTextFilter,
+ FilterDef def) {
+ Object instance;
+ try {
+ instance = def.getImpl().newInstance();
+ }
+ catch ( InstantiationException e ) {
+ throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
+ }
+ for ( Map.Entry<String, Object> entry : fullTextFilter.getParameters().entrySet() ) {
+ def.invoke( entry.getKey(), instance, entry.getValue() );
+ }
+ if ( cacheInstance( def.getCacheMode() ) && def.getKeyMethod() == null && fullTextFilter.getParameters()
+ .size() > 0 ) {
+ throw new SearchException( "Filter with parameters and no @Key method: " + fullTextFilter.getName() );
+ }
+ return instance;
+ }
+
+ private org.apache.lucene.search.Query filterQueryByClasses(org.apache.lucene.search.Query luceneQuery) {
+ if ( !needClassFilterClause ) {
+ return luceneQuery;
+ }
+ else {
+			//A query filter is more practical than manual class filtering post query (esp on scrollable resultsets)
+			//it also probably minimises the memory footprint
+ BooleanQuery classFilter = new BooleanQuery();
+ //annihilate the scoring impact of DocumentBuilderIndexedEntity.CLASS_FIELDNAME
+ classFilter.setBoost( 0 );
+ for ( Class clazz : classesAndSubclasses ) {
+ Term t = new Term( DocumentBuilder.CLASS_FIELDNAME, clazz.getName() );
+ TermQuery termQuery = new TermQuery( t );
+ classFilter.add( termQuery, BooleanClause.Occur.SHOULD );
+ }
+ BooleanQuery filteredQuery = new BooleanQuery();
+ filteredQuery.add( luceneQuery, BooleanClause.Occur.MUST );
+ filteredQuery.add( classFilter, BooleanClause.Occur.MUST );
+ return filteredQuery;
+ }
+ }
+
+ private int max(int first, int totalHits) {
+ if ( maxResults == null ) {
+ return totalHits - 1;
+ }
+ else {
+ return maxResults + first < totalHits ?
+ first + maxResults - 1 :
+ totalHits - 1;
+ }
+ }
+
+ private int first() {
+ return firstResult != null ?
+ firstResult :
+ 0;
+ }
+
+ /**
+ * Build the index searcher for this fulltext query.
+ *
+ * @param searchFactoryImplementor the search factory.
+ *
+	 * @return the <code>IndexSearcher</code> for this query (can be <code>null</code>).
+ * TODO change classesAndSubclasses by side effect, which is a mismatch with the Searcher return, fix that.
+ */
+ private IndexSearcher buildSearcher(SearchFactoryImplementor searchFactoryImplementor) {
+ Map<Class<?>, DocumentBuilderIndexedEntity<?>> builders = searchFactoryImplementor.getDocumentBuildersIndexedEntities();
+ List<DirectoryProvider> targetedDirectories = new ArrayList<DirectoryProvider>();
+ Set<String> idFieldNames = new HashSet<String>();
+
+ Similarity searcherSimilarity = null;
+		//TODO check if caching this for the last n lists of indexedTargetedEntities gives a perf boost
+ if ( indexedTargetedEntities.size() == 0 ) {
+			// empty indexedTargetedEntities array means search over all indexed entities,
+ // but we have to make sure there is at least one
+ if ( builders.isEmpty() ) {
+ throw new HibernateException(
+ "There are no mapped entities. Don't forget to add @Indexed to at least one class."
+ );
+ }
+
+ for ( DocumentBuilderIndexedEntity builder : builders.values() ) {
+ searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
+ if ( builder.getIdKeywordName() != null ) {
+ idFieldNames.add( builder.getIdKeywordName() );
+ allowFieldSelectionInProjection = allowFieldSelectionInProjection && builder.allowFieldSelectionInProjection();
+ }
+ populateDirectories( targetedDirectories, builder );
+ }
+ classesAndSubclasses = null;
+ }
+ else {
+ Set<Class<?>> involvedClasses = new HashSet<Class<?>>( indexedTargetedEntities.size() );
+ involvedClasses.addAll( indexedTargetedEntities );
+ for ( Class<?> clazz : indexedTargetedEntities ) {
+ DocumentBuilderIndexedEntity<?> builder = builders.get( clazz );
+ if ( builder != null ) {
+ involvedClasses.addAll( builder.getMappedSubclasses() );
+ }
+ }
+
+ for ( Class clazz : involvedClasses ) {
+ DocumentBuilderIndexedEntity builder = builders.get( clazz );
+ //TODO should we rather choose a polymorphic path and allow non mapped entities
+ if ( builder == null ) {
+ throw new HibernateException( "Not a mapped entity (don't forget to add @Indexed): " + clazz );
+ }
+ if ( builder.getIdKeywordName() != null ) {
+ idFieldNames.add( builder.getIdKeywordName() );
+ allowFieldSelectionInProjection = allowFieldSelectionInProjection && builder.allowFieldSelectionInProjection();
+ }
+ searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
+ populateDirectories( targetedDirectories, builder );
+ }
+ this.classesAndSubclasses = involvedClasses;
+ }
+ this.idFieldNames = idFieldNames;
+
+ //compute optimization needClassFilterClause
+ //if at least one DP contains one class that is not part of the targeted classesAndSubclasses we can't optimize
+ if ( classesAndSubclasses != null ) {
+ for ( DirectoryProvider dp : targetedDirectories ) {
+ final Set<Class<?>> classesInDirectoryProvider = searchFactoryImplementor.getClassesInDirectoryProvider(
+ dp
+ );
+ // if a DP contains only one class, we know for sure it's part of classesAndSubclasses
+ if ( classesInDirectoryProvider.size() > 1 ) {
+ //risk of needClassFilterClause
+ for ( Class clazz : classesInDirectoryProvider ) {
+ if ( !classesAndSubclasses.contains( clazz ) ) {
+ this.needClassFilterClause = true;
+ break;
+ }
+ }
+ }
+ if ( this.needClassFilterClause ) {
+ break;
+ }
+ }
+ }
+
+ //set up the searcher
+ final DirectoryProvider[] directoryProviders = targetedDirectories.toArray(
+ new DirectoryProvider[targetedDirectories.size()]
+ );
+ IndexSearcher is = new IndexSearcher(
+ searchFactoryImplementor.getReaderProvider().openReader(
+ directoryProviders
+ )
+ );
+ is.setSimilarity( searcherSimilarity );
+ return is;
+ }
+
+ private void populateDirectories(List<DirectoryProvider> directories, DocumentBuilderIndexedEntity builder) {
+ final IndexShardingStrategy indexShardingStrategy = builder.getDirectoryProviderSelectionStrategy();
+ final DirectoryProvider[] directoryProviders;
+ if ( filterDefinitions != null && !filterDefinitions.isEmpty() ) {
+ directoryProviders = indexShardingStrategy.getDirectoryProvidersForQuery(
+ filterDefinitions.values().toArray( new FullTextFilterImplementor[filterDefinitions.size()] )
+ );
+ }
+ else {
+			//no filter: get all shards
+ directoryProviders = indexShardingStrategy.getDirectoryProvidersForQuery( EMPTY_FULL_TEXT_FILTER_IMPLEMENTOR );
+ }
+
+ for ( DirectoryProvider provider : directoryProviders ) {
+ if ( !directories.contains( provider ) ) {
+ directories.add( provider );
+ }
+ }
+ }
+
+ private Similarity checkSimilarity(Similarity similarity, DocumentBuilderIndexedEntity builder) {
+ if ( similarity == null ) {
+ similarity = builder.getSimilarity();
+ }
+ else if ( !similarity.getClass().equals( builder.getSimilarity().getClass() ) ) {
+ throw new HibernateException(
+ "Cannot perform search on two entities with differing Similarity implementations (" + similarity.getClass()
+ .getName() + " & " + builder.getSimilarity().getClass().getName() + ")"
+ );
+ }
+
+ return similarity;
+ }
+
+ private void closeSearcher(Searcher searcher, ReaderProvider readerProvider) {
+ Set<IndexReader> indexReaders = getIndexReaders( searcher );
+
+ for ( IndexReader indexReader : indexReaders ) {
+ readerProvider.closeReader( indexReader );
+ }
+ }
+
+ public int getResultSize() {
+ if ( resultSize == null ) {
+ //get result size without object initialization
+ IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
+ if ( searcher == null ) {
+ resultSize = 0;
+ }
+ else {
+ TopDocs hits;
+ try {
+ hits = getQueryHits(
+ searcher, 1
+ ).topDocs; // Lucene enforces that at least one top doc will be retrieved.
+ resultSize = hits.totalHits;
+ }
+ catch ( IOException e ) {
+ throw new HibernateException( "Unable to query Lucene index", e );
+ }
+ finally {
+ //searcher cannot be null
+ try {
+ closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
+ //searchFactoryImplementor.getReaderProvider().closeReader( searcher.getIndexReader() );
+ }
+ catch ( SearchException e ) {
+ log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
+ }
+ }
+ }
+ }
+ return this.resultSize;
+ }
+
+ public FullTextQuery setCriteriaQuery(Criteria criteria) {
+ this.criteria = criteria;
+ return this;
+ }
+
+ public FullTextQuery setProjection(String... fields) {
+ if ( fields == null || fields.length == 0 ) {
+ this.indexProjection = null;
+ }
+ else {
+ this.indexProjection = fields;
+ }
+ return this;
+ }
+
+ public FullTextQuery setFirstResult(int firstResult) {
+ if ( firstResult < 0 ) {
+ throw new IllegalArgumentException( "'first' pagination parameter less than 0" );
+ }
+ this.firstResult = firstResult;
+ return this;
+ }
+
+ public FullTextQuery setMaxResults(int maxResults) {
+ if ( maxResults < 0 ) {
+ throw new IllegalArgumentException( "'max' pagination parameter less than 0" );
+ }
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public FullTextQuery setFetchSize(int fetchSize) {
+ super.setFetchSize( fetchSize );
+ if ( fetchSize <= 0 ) {
+			throw new IllegalArgumentException( "'fetch size' parameter less than or equal to 0" );
+ }
+ this.fetchSize = fetchSize;
+ return this;
+ }
+
+ public Query setLockOptions(LockOptions lockOptions) {
+ throw new UnsupportedOperationException( "Lock options are not implemented in Hibernate Search queries" );
+ }
+
+ @Override
+ public FullTextQuery setResultTransformer(ResultTransformer transformer) {
+ super.setResultTransformer( transformer );
+ this.resultTransformer = transformer;
+ return this;
+ }
+
+ public <T> T unwrap(Class<T> type) {
+ if ( type == org.apache.lucene.search.Query.class) {
+ return (T) luceneQuery;
+ }
+ throw new IllegalArgumentException("Cannot unwrap " + type.getName() );
+ }
+
+ public LockOptions getLockOptions() {
+ throw new UnsupportedOperationException( "Lock options are not implemented in Hibernate Search queries" );
+ }
+
+ public int executeUpdate() throws HibernateException {
+ throw new UnsupportedOperationException( "executeUpdate is not supported in Hibernate Search queries" );
+ }
+
+ public Query setLockMode(String alias, LockMode lockMode) {
+ throw new UnsupportedOperationException( "Lock options are not implemented in Hibernate Search queries" );
+ }
+
+ protected Map getLockModes() {
+ throw new UnsupportedOperationException( "Lock options are not implemented in Hibernate Search queries" );
+ }
+
+ public FullTextFilter enableFullTextFilter(String name) {
+ FullTextFilterImpl filterDefinition = filterDefinitions.get( name );
+ if ( filterDefinition != null ) {
+ return filterDefinition;
+ }
+
+ filterDefinition = new FullTextFilterImpl();
+ filterDefinition.setName( name );
+ FilterDef filterDef = searchFactoryImplementor.getFilterDefinition( name );
+ if ( filterDef == null ) {
+			throw new SearchException( "Unknown @FullTextFilter: " + name );
+ }
+ filterDefinitions.put( name, filterDefinition );
+ return filterDefinition;
+ }
+
+ public void disableFullTextFilter(String name) {
+ filterDefinitions.remove( name );
+ }
+
+ private SearchFactoryImplementor getSearchFactoryImplementor() {
+ if ( searchFactoryImplementor == null ) {
+ searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
+ }
+ return searchFactoryImplementor;
+ }
+
+ private static Loader noLoader = new Loader() {
+ public void init(Session session, SearchFactoryImplementor searchFactoryImplementor) {
+ }
+
+ public Object load(EntityInfo entityInfo) {
+ throw new UnsupportedOperationException( "noLoader should not be used" );
+ }
+
+ public List load(EntityInfo... entityInfos) {
+ throw new UnsupportedOperationException( "noLoader should not be used" );
+ }
+ };
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/FullTextQueryImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/IteratorImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/IteratorImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/IteratorImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,87 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+import org.hibernate.search.engine.Loader;
+import org.hibernate.search.engine.EntityInfo;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//TODO load the next batch-size elements to benefit from batch-size
+public class IteratorImpl implements Iterator {
+
+ private final List<EntityInfo> entityInfos;
+ private int index = 0;
+ private final int size;
+ private Object next;
+ private int nextObjectIndex = -1;
+ private final Loader loader;
+
+ public IteratorImpl(List<EntityInfo> entityInfos, Loader loader) {
+ this.entityInfos = entityInfos;
+ this.size = entityInfos.size();
+ this.loader = loader;
+ }
+
+ //side effect is to set up next
+ public boolean hasNext() {
+ if ( nextObjectIndex == index ) return next != null;
+ next = null;
+ nextObjectIndex = -1;
+ do {
+ if ( index >= size ) {
+ nextObjectIndex = index;
+ next = null;
+ return false;
+ }
+ next = loader.load( entityInfos.get( index ) );
+ if ( next == null ) {
+ index++;
+ }
+ else {
+ nextObjectIndex = index;
+ }
+ }
+ while ( next == null );
+ return true;
+ }
+
+ public Object next() {
+ //hasNext() has side effect
+ if ( !hasNext() ) throw new NoSuchElementException( "Out of bounds" );
+ index++;
+ return next;
+ }
+
+ public void remove() {
+ //TODO this is theoretically doable
+ throw new UnsupportedOperationException( "Cannot remove from a lucene query iterator" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/IteratorImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
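
A minimal usage sketch of the iterator above (entityInfos and loader are hypothetical, assumed to come from an already executed Lucene query):

    Iterator results = new IteratorImpl( entityInfos, loader );
    while ( results.hasNext() ) {
        // hasNext() loads the next entity as a side effect and silently skips null results
        Object entity = results.next();
    }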
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/QueryHits.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/QueryHits.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/QueryHits.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,113 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+
+import org.hibernate.search.SearchException;
+
+/**
+ * A helper class which gives access to the current query and its hits. This class will dynamically
+ * reload the underlying <code>TopDocs</code> if required.
+ *
+ * @author Hardy Ferentschik
+ */
+public class QueryHits {
+
+ private static final int DEFAULT_TOP_DOC_RETRIEVAL_SIZE = 100;
+ public final org.apache.lucene.search.Query preparedQuery;
+ public final Searcher searcher;
+ public final Filter filter;
+ public final Sort sort;
+ public final int totalHits;
+ public TopDocs topDocs;
+
+ public QueryHits(Searcher searcher, org.apache.lucene.search.Query preparedQuery, Filter filter, Sort sort)
+ throws IOException {
+ this( searcher, preparedQuery, filter, sort, DEFAULT_TOP_DOC_RETRIEVAL_SIZE );
+ }
+
+ public QueryHits(Searcher searcher, org.apache.lucene.search.Query preparedQuery, Filter filter, Sort sort,
+ Integer n )
+ throws IOException {
+ this.preparedQuery = preparedQuery;
+ this.searcher = searcher;
+ this.filter = filter;
+ this.sort = sort;
+ updateTopDocs( n );
+ totalHits = topDocs.totalHits;
+ }
+
+ public Document doc(int index) throws IOException {
+ return searcher.doc( docId( index ) );
+ }
+
+ public Document doc(int index, FieldSelector selector) throws IOException {
+ return searcher.doc( docId( index ), selector );
+ }
+
+ public ScoreDoc scoreDoc(int index) throws IOException {
+ if ( index >= totalHits ) {
+ throw new SearchException("Not a valid ScoreDoc index: " + index);
+ }
+
+ // TODO - Is there a better way to get more TopDocs? Get more or less?
+ if ( index >= topDocs.scoreDocs.length ) {
+ updateTopDocs( 2 * index );
+ }
+
+ return topDocs.scoreDocs[index];
+ }
+
+ public int docId(int index) throws IOException {
+ return scoreDoc( index ).doc;
+ }
+
+ public float score(int index) throws IOException {
+ return scoreDoc( index ).score;
+ }
+
+ public Explanation explain(int index) throws IOException {
+ return searcher.explain( preparedQuery, docId( index ) );
+ }
+
+ private void updateTopDocs(int n) throws IOException {
+ if ( sort == null ) {
+ topDocs = searcher.search( preparedQuery, filter, n );
+ }
+ else {
+ topDocs = searcher.search( preparedQuery, filter, n, sort );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/QueryHits.java
___________________________________________________________________
Name: svn:keywords
+ Id
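
A usage sketch of QueryHits (searcher and preparedQuery are hypothetical, and the index is assumed to hold enough matches); access beyond the initially retrieved window transparently reloads the TopDocs:

    QueryHits hits = new QueryHits( searcher, preparedQuery, null, null ); // retrieves up to 100 top docs
    int total = hits.totalHits;
    ScoreDoc best = hits.scoreDoc( 0 );   // served from the cached TopDocs
    Document doc = hits.doc( 150 );       // 150 < totalHits but beyond the window: triggers updateTopDocs( 300 )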
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/ScrollableResultsImpl.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/ScrollableResultsImpl.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,494 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+import org.apache.lucene.search.IndexSearcher;
+import org.slf4j.Logger;
+
+import org.hibernate.HibernateException;
+import org.hibernate.ScrollableResults;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.engine.DocumentExtractor;
+import org.hibernate.search.engine.EntityInfo;
+import org.hibernate.search.engine.Loader;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.type.Type;
+
+/**
+ * Implements scrollable and paginated result sets.
+ * Contrary to Query#iterate() or Query#list(), this implementation may
+ * expose null objects (if the index is out of date).
+ * <p/>
+ * <p/>
+ * The following methods that change the value of 'current' will check
+ * and set its value to either 'afterLast' or 'beforeFirst' depending
+ * on direction. This is to prevent rogue values from setting it outside
+ * the boundaries of the results.
+ * <ul>
+ * <li>next()</li>
+ * <li>previous()</li>
+ * <li>scroll(i)</li>
+ * <li>last()</li>
+ * <li>first()</li>
+ * </ul>
+ *
+ * @see org.hibernate.Query
+ *
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ * @author Sanne Grinovero
+ */
+public class ScrollableResultsImpl implements ScrollableResults {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private final SearchFactory searchFactory;
+ private final IndexSearcher searcher;
+ private final int first;
+ private final int max;
+ private final int fetchSize;
+ private final Loader loader;
+ private final DocumentExtractor documentExtractor;
+ private final SessionImplementor session;
+
+ /**
+ * Caches result rows and EntityInfo from
+ * <code>first</code> to <code>max</code>
+ */
+ private final LoadedObject[] resultsContext;
+
+ private int current;
+
+ public ScrollableResultsImpl( IndexSearcher searcher, int first, int max, int fetchSize, DocumentExtractor extractor,
+ Loader loader, SearchFactory searchFactory, SessionImplementor sessionImplementor
+ ) {
+ this.searchFactory = searchFactory;
+ this.searcher = searcher;
+ this.first = first;
+ this.max = max;
+ this.loader = loader;
+ this.documentExtractor = extractor;
+ this.fetchSize = fetchSize;
+ this.session = sessionImplementor;
+ int size = Math.max( max - first + 1, 0 );
+ this.resultsContext = new LoadedObject[size];
+ beforeFirst();
+ }
+
+ private LoadedObject ensureCurrentLoaded() {
+ LoadedObject currentCacheRef = resultsContext[current - first];
+ if ( currentCacheRef != null ) {
+ return currentCacheRef;
+ }
+ // the loading window is optimized for scrolling in both directions:
+ int windowStop = Math.min( max + 1 , current + fetchSize );
+ int windowStart = Math.max( first, current - fetchSize + 1 );
+ List<EntityInfo> entityInfosToLoad = new ArrayList<EntityInfo>( fetchSize );
+ int sizeToLoad = 0;
+ for (int x = windowStart; x < windowStop; x++) {
+ int arrayIdx = x - first;
+ LoadedObject lo = resultsContext[arrayIdx];
+ if ( lo == null ) {
+ lo = new LoadedObject();
+ // makes hard references and extracts EntityInfos:
+ entityInfosToLoad.add( lo.getEntityInfo( x ) );
+ resultsContext[arrayIdx] = lo;
+ sizeToLoad++;
+ if ( sizeToLoad >= fetchSize )
+ break;
+ }
+ }
+ //preload efficiently by batches:
+ if ( sizeToLoad > 1 ) {
+ loader.load( entityInfosToLoad.toArray( new EntityInfo[sizeToLoad] ) );
+ //(no references stored at this point: they still need to be loaded one by one to inject null results)
+ }
+ return resultsContext[ current - first ];
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public boolean next() {
+ // Increases cursor pointer by one. If this places it >
+ // max + 1 (afterLast) then set it to afterLast and return
+ // false.
+ if ( ++current > max ) {
+ afterLast();
+ return false;
+ }
+ return true;
+ }
+
+ public boolean previous() {
+ // Decreases cursor pointer by one. If this places it <
+ // first - 1 (beforeFirst) then set it to beforeFirst and
+ // return false.
+ if ( --current < first ) {
+ beforeFirst();
+ return false;
+ }
+ return true;
+ }
+
+ public boolean scroll(int i) {
+ // Since we have to take into account that we can scroll any
+ // amount positive or negative, we perform the same tests that
+ // we performed in next() and previous().
+ current = current + i;
+ if ( current > max ) {
+ afterLast();
+ return false;
+ }
+ else if ( current < first ) {
+ beforeFirst();
+ return false;
+ }
+ else {
+ return true;
+ }
+ }
+
+ public boolean last() {
+ current = max;
+ if ( current < first ) {
+ beforeFirst();
+ return false;
+ }
+ return max >= first;
+ }
+
+ public boolean first() {
+ current = first;
+ if ( current > max ) {
+ afterLast();
+ return false;
+ }
+ return max >= first;
+ }
+
+ public void beforeFirst() {
+ current = first - 1;
+ }
+
+ public void afterLast() {
+ current = max + 1;
+ //TODO help gc by clearing all structures when using forward-only scroll mode.
+ }
+
+ public boolean isFirst() {
+ return current == first;
+ }
+
+ public boolean isLast() {
+ return current == max;
+ }
+
+ public void close() {
+ try {
+ searchFactory.getReaderProvider().closeReader( searcher.getIndexReader() );
+ }
+ catch (SearchException e) {
+ log.warn( "Unable to properly close searcher in ScrollableResults", e );
+ }
+ }
+
+ public Object[] get() throws HibernateException {
+ // Don't throw an exception here; just return null. This is similar
+ // to the RowSet spec in JDBC, which returns false (or 0) when out of
+ // bounds, but we can't do that here since we have to return an
+ // Object[]. J.G
+ if ( current < first || current > max ) return null;
+ LoadedObject cacheEntry = ensureCurrentLoaded();
+ return cacheEntry.getManagedResult( current );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Object get(int i) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Type getType(int i) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Integer getInteger(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Long getLong(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Float getFloat(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Boolean getBoolean(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Double getDouble(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Short getShort(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Byte getByte(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Character getCharacter(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public byte[] getBinary(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public String getText(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Blob getBlob(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Clob getClob(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public String getString(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public BigDecimal getBigDecimal(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public BigInteger getBigInteger(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Date getDate(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Locale getLocale(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public Calendar getCalendar(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ /**
+ * This method is not supported on Lucene based queries
+ * @throws UnsupportedOperationException always thrown
+ */
+ public TimeZone getTimeZone(int col) {
+ throw new UnsupportedOperationException( "Lucene does not work on columns" );
+ }
+
+ public int getRowNumber() {
+ if ( max < first ) return -1;
+ return current - first;
+ }
+
+ public boolean setRowNumber(int rowNumber) {
+ if ( rowNumber >= 0 ) {
+ current = first + rowNumber;
+ }
+ else {
+ current = max + rowNumber + 1; //max row start at -1
+ }
+ return current >= first && current <= max;
+ }
+
+ private final class LoadedObject {
+
+ private Reference<Object[]> entity; //may be null before the first load; Reference.get() can also return null
+ private Reference<EntityInfo> einfo; //may be null before the first load; Reference.get() can also return null
+
+ /**
+ * Gets the objects from the cache if they are available and attached to the session,
+ * or reloads them and updates the cache entry.
+ * @param x absolute position in fulltext result.
+ * @return the managed objects
+ */
+ private Object[] getManagedResult(int x) {
+ EntityInfo entityInfo = getEntityInfo( x );
+ Object[] objects = entity==null ? null : entity.get();
+ if ( objects!=null && areAllEntitiesManaged( objects, entityInfo ) ) {
+ return objects;
+ }
+ else {
+ Object loaded = loader.load( entityInfo );
+ if ( ! loaded.getClass().isArray() ) loaded = new Object[] { loaded };
+ objects = (Object[]) loaded;
+ this.entity = new SoftReference<Object[]>( objects );
+ return objects;
+ }
+ }
+
+ /**
+ * Extracts an EntityInfo, either from the cache or from the index.
+ * @param x the position in the index.
+ * @return the extracted EntityInfo
+ */
+ private EntityInfo getEntityInfo(int x) {
+ EntityInfo entityInfo = einfo==null ? null : einfo.get();
+ if ( entityInfo==null ) {
+ try {
+ entityInfo = documentExtractor.extract( x );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to read Lucene topDocs[" + x + "]", e );
+ }
+ einfo = new SoftReference<EntityInfo>( entityInfo );
+ }
+ return entityInfo;
+ }
+
+ }
+
+ private boolean areAllEntitiesManaged(Object[] objects, EntityInfo entityInfo) {
+ //check if all entities are session-managed and skip the check on projected values
+ org.hibernate.Session hibSession = (org.hibernate.Session) session;
+ if ( entityInfo.projection != null ) {
+ // using projection: test only for entities
+ for ( int idx : entityInfo.indexesOfThis ) {
+ Object o = objects[idx];
+ //TODO improve: is it useful to check for proxies and have them reassociated to persistence context?
+ if ( ! hibSession.contains( o ) )
+ return false;
+ }
+ return true;
+ }
+ else {
+ return hibSession.contains( objects[0] );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/ScrollableResultsImpl.java
___________________________________________________________________
Name: svn:keywords
+ Id
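
A sketch of the cursor contract described in the class javadoc (all constructor arguments are hypothetical and mirror the signature above):

    ScrollableResults results = new ScrollableResultsImpl( searcher, first, max, fetchSize,
            extractor, loader, searchFactory, sessionImplementor );
    while ( results.next() ) {          // returns false once the cursor moves past max (afterLast)
        Object[] row = results.get();   // per the class javadoc, rows can expose null objects if the index is out of date
    }
    results.first();                    // repositions the cursor on the first result, if any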
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/AbstractTermQueryBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/AbstractTermQueryBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/AbstractTermQueryBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+/**
+ * Abstract class that can be used to store state and any information that all the various TermQueryBuilder
+ * types might need.
+ *
+ * @author Navin Surtani
+ */
+public abstract class AbstractTermQueryBuilder {
+
+ protected TermQueryBuilderDataStore dataStore;
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/AbstractTermQueryBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BooleanContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BooleanContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BooleanContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,72 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Class that allows users to create BooleanQueries.
+ *
+ * @author Navin Surtani
+ */
+
+//TODO do we want a QueryCreator interface with T extends Query and T createQuery() ?
+public class BooleanContext {
+
+ private BooleanClause.Occur occur;
+ // The list is initially sized to 5 as a temporary choice, rather than relying on an arbitrary default capacity.
+ private final List<Query> clauses = new ArrayList<Query>(5);
+
+ public BooleanContext(BooleanClause.Occur occur) {
+ this.occur = occur;
+ }
+
+ public BooleanContext add(Query clause) {
+ clauses.add( clause );
+ return this;
+ }
+
+ public Query createQuery() {
+ BooleanQuery boolQuery = new BooleanQuery();
+ for (Query clause : clauses) {
+ boolQuery.add( clause, occur );
+ }
+ return boolQuery;
+ }
+
+ protected void setOccur(BooleanClause.Occur occur) {
+ this.occur = occur;
+ }
+
+ protected BooleanClause.Occur getOccur() {
+ return occur;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BooleanContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
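
A minimal sketch combining two Lucene term queries with SHOULD semantics (the Term and TermQuery imports from org.apache.lucene are assumed):

    BooleanContext shouldContext = new BooleanContext( BooleanClause.Occur.SHOULD );
    Query query = shouldContext
            .add( new TermQuery( new Term( "title", "hibernate" ) ) )
            .add( new TermQuery( new Term( "title", "search" ) ) )
            .createQuery();             // builds a BooleanQuery with each clause added as SHOULD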
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BuildableTermQueryBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BuildableTermQueryBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BuildableTermQueryBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+
+import java.util.List;
+
+/**
+ * Class that will allow the user to actually build his query.
+ *
+ * @author Navin Surtani
+ */
+public class BuildableTermQueryBuilder extends AbstractTermQueryBuilder {
+
+ public BuildableTermQueryBuilder(TermQueryBuilderDataStore dataStore) {
+ this.dataStore = dataStore;
+ }
+
+ public UnbuildableTermQueryBuilderOnSearch on(String field) {
+ return new UnbuildableTermQueryBuilderOnSearch(dataStore, field);
+ }
+
+ public Query build() {
+ // Start by getting the lists of fields and searches.
+ List<Term> terms = dataStore.getTerms();
+
+ //TODO:- This kind of sucks. How can we do this nicely?
+ // Create a TermQuery for the first term.
+ Query tq = new TermQuery(terms.get(0));
+
+ // Now create an array of TermQueries for me to do the combine later on.
+ // The array size will be 1 less than that of the list.
+ TermQuery[] termQueries = new TermQuery[terms.size() - 1];
+
+ // Loop through the rest of the list.
+ for (int i = 1; i<terms.size(); i++){
+ // The index of each newly created TermQuery in the array will always be 1 less than that of the list
+ // This is because the first term in the list has already been dealt with, so the first termQuery in the array
+ // will correspond to the second term from the list.
+
+ termQueries[i - 1] = new TermQuery(terms.get(i));
+ }
+
+ tq = tq.combine(termQueries);
+ return tq;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/BuildableTermQueryBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/NegatableBooleanContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/NegatableBooleanContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/NegatableBooleanContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,72 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.Query;
+
+/**
+ * Boolean context whose occurrence can be negated via {@link #not()}: MUST becomes MUST_NOT and vice versa.
+ *
+ * @author Navin Surtani
+ */
+public class NegatableBooleanContext {
+
+ private final BooleanContext delegate;
+
+ public NegatableBooleanContext(BooleanClause.Occur occur) {
+ this.delegate = new BooleanContext( occur );
+ }
+
+ public NegatableBooleanContext not() {
+ BooleanClause.Occur present = delegate.getOccur();
+ if ( present == null ) {
+ //assertion exception
+ }
+ else if (present == BooleanClause.Occur.SHOULD) {
+ //assertion exception
+ }
+ else if ( present == BooleanClause.Occur.MUST) {
+ delegate.setOccur(BooleanClause.Occur.MUST_NOT);
+ }
+ else if (present == BooleanClause.Occur.MUST_NOT) {
+ delegate.setOccur(BooleanClause.Occur.MUST);
+ }
+ else {
+ //assert failure
+ }
+ return this;
+ }
+
+ public NegatableBooleanContext add(Query clause) {
+ delegate.add(clause);
+ return this;
+ }
+
+ public Query createQuery() {
+ return delegate.createQuery();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/NegatableBooleanContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
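
A sketch of the negation behaviour: not() flips a MUST occurrence to MUST_NOT before the clauses are combined (Term and TermQuery are assumed to be imported):

    NegatableBooleanContext context = new NegatableBooleanContext( BooleanClause.Occur.MUST );
    Query query = context.not()                                        // MUST becomes MUST_NOT
            .add( new TermQuery( new Term( "status", "archived" ) ) )
            .createQuery();                                            // the clause is added with Occur.MUST_NOT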
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/SealedQueryBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/SealedQueryBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/SealedQueryBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.search.BooleanClause;
+
+/**
+ * Starting class that will allow users to build their queries using the DSL.
+ *
+ * //TODO: This needs to be tied into the SearchFactory somehow so that users can actually "access" it.
+ *
+ * @author Navin Surtani
+ */
+public class SealedQueryBuilder {
+
+ public SealedQueryBuilder(){
+
+ }
+
+ public BooleanContext should() {
+ return new BooleanContext(BooleanClause.Occur.SHOULD);
+ }
+
+ public NegatableBooleanContext must(){
+ return new NegatableBooleanContext(BooleanClause.Occur.MUST);
+ }
+
+ public UnbuildableTermQueryBuilderOnField term(){
+ return new UnbuildableTermQueryBuilderOnField();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/SealedQueryBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
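
A hypothetical end-to-end use of the DSL entry point, chaining the builder types added in this commit:

    Query query = new SealedQueryBuilder()
            .term()                            // UnbuildableTermQueryBuilderOnField
            .on( "title" )                     // UnbuildableTermQueryBuilderOnSearch
            .matches( "hibernate" )            // BuildableTermQueryBuilder
            .on( "author" ).matches( "bernard" )
            .build();                          // combines the accumulated terms into a single Lucene query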
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/TermQueryBuilderDataStore.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/TermQueryBuilderDataStore.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/TermQueryBuilderDataStore.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.index.Term;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class will just store the required terms.
+ *
+ * @author Navin Surtani
+ */
+public class TermQueryBuilderDataStore {
+
+ private List<Term> terms;
+
+ public TermQueryBuilderDataStore(){
+ terms = new ArrayList<Term>();
+ }
+
+ public List<Term> getTerms(){
+ return terms;
+ }
+
+ public void addTerm(Term term){
+ terms.add(term);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/TermQueryBuilderDataStore.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnField.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnField.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnField.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,43 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+/**
+ * Class that exposes only the on(String field) method as this class will only be returned to a user when
+ * SealedQueryBuilder.term() is called.
+ *
+ * @author Navin Surtani
+ */
+public class UnbuildableTermQueryBuilderOnField extends AbstractTermQueryBuilder {
+
+ public UnbuildableTermQueryBuilderOnField(){
+ dataStore = new TermQueryBuilderDataStore();
+ }
+
+ public UnbuildableTermQueryBuilderOnSearch on(String field){
+ return new UnbuildableTermQueryBuilderOnSearch(dataStore, field);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnField.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnSearch.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnSearch.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnSearch.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.query.dsl;
+
+import org.apache.lucene.index.Term;
+
+/**
+ * Class that allows users to continue building their TermQueries.
+ * However, a TermQuery cannot be built from an instance of this class, as there is not enough information
+ * to do so.
+ *
+ * @author Navin Surtani
+ */
+public class UnbuildableTermQueryBuilderOnSearch extends AbstractTermQueryBuilder {
+
+ private String field;
+
+ public UnbuildableTermQueryBuilderOnSearch(TermQueryBuilderDataStore dataStore, String field) {
+ this.dataStore = dataStore;
+ this.field = field;
+ }
+
+ public BuildableTermQueryBuilder matches(String search) {
+ // Now that I've got enough information to create a term I can do so
+ Term term = new Term(field, search);
+ dataStore.addTerm(term);
+ // return the Buildable type.
+ return new BuildableTermQueryBuilder(dataStore);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/query/dsl/UnbuildableTermQueryBuilderOnSearch.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/CacheableMultiReader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/CacheableMultiReader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/CacheableMultiReader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,77 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+
+/**
+ * MultiReader ensuring equals returns true if the underlying readers are the same (and in the same order).
+ * Especially useful when using {@link org.apache.lucene.search.CachingWrapperFilter}.
+ *
+ * @author Emmanuel Bernard
+ */
+public class CacheableMultiReader extends MultiReader {
+
+ // This is package private as the intention of the Lucene team seems to be to not
+ // expose this publicly (it's a protected member in Lucene 2.3)
+ final IndexReader[] subReaders;
+
+ public CacheableMultiReader(IndexReader[] subReaders) {
+ super( subReaders );
+ this.subReaders = subReaders;
+ }
+
+ /**
+ * only available since 2.3
+ */
+ /*
+ public CacheableMultiReader(IndexReader[] subReaders, boolean closeSubReaders) {
+ super( subReaders, closeSubReaders );
+ this.subReaders = subReaders;
+ }
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if ( this == obj ) return true;
+ if ( !( obj instanceof CacheableMultiReader ) ) return false;
+ CacheableMultiReader that = (CacheableMultiReader) obj;
+ int length = this.subReaders.length;
+ if ( length != that.subReaders.length ) return false;
+ for (int index = 0; index < length; index++) {
+ if ( !this.subReaders[index].equals( that.subReaders[index] ) ) return false;
+ }
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = 0;
+ for (Object reader : this.subReaders) {
+ result = 31 * result + reader.hashCode();
+ }
+ return result;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/CacheableMultiReader.java
___________________________________________________________________
Name: svn:keywords
+ Id
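
A sketch of why the equals/hashCode overrides matter: two instances wrapping the same sub-readers compare equal, so a CachingWrapperFilter keyed on the reader can reuse its cached results (readerA and readerB are hypothetical IndexReaders):

    IndexReader[] subReaders = { readerA, readerB };
    CacheableMultiReader one = new CacheableMultiReader( subReaders );
    CacheableMultiReader two = new CacheableMultiReader( subReaders );
    assert one.equals( two ) && one.hashCode() == two.hashCode();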
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/NotSharedReaderProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/NotSharedReaderProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/NotSharedReaderProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,76 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import static org.hibernate.search.reader.ReaderProviderHelper.buildMultiReader;
+import static org.hibernate.search.reader.ReaderProviderHelper.clean;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * Open a reader each time
+ *
+ * @author Emmanuel Bernard
+ */
+public class NotSharedReaderProvider implements ReaderProvider {
+ @SuppressWarnings( { "ThrowableInstanceNeverThrown" } )
+ public IndexReader openReader(DirectoryProvider... directoryProviders) {
+ final int length = directoryProviders.length;
+ IndexReader[] readers = new IndexReader[length];
+ try {
+ for (int index = 0; index < length; index++) {
+ readers[index] = IndexReader.open( directoryProviders[index].getDirectory(), true );
+ }
+ }
+ catch (IOException e) {
+ //TODO more contextual info
+ clean( new SearchException( "Unable to open one of the Lucene indexes", e ), readers );
+ }
+ return buildMultiReader( length, readers );
+ }
+
+
+ @SuppressWarnings( { "ThrowableInstanceNeverThrown" } )
+ public void closeReader(IndexReader reader) {
+ try {
+ reader.close();
+ }
+ catch (IOException e) {
+ //TODO extract subReaders and close each one individually
+ clean( new SearchException( "Unable to close multiReader" ), reader );
+ }
+ }
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ }
+
+ public void destroy() {
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/NotSharedReaderProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Responsible for providing and managing the lifecycle of a read-only reader. The implementation must have a
+ * no-arg constructor.
+ * <p/>
+ * Note that the reader must be closed once opened.
+ *
+ * @author Emmanuel Bernard
+ */
+public interface ReaderProvider {
+ /**
+ * Open a read-only reader on all the listed directory providers.
+ * The opened reader has to be closed through {@link #closeReader(IndexReader)}.
+ * The opening can be virtual.
+ */
+ IndexReader openReader(DirectoryProvider... directoryProviders);
+
+ /**
+ * Close a reader previously opened by {@link #openReader}.
+ * The closing can be virtual.
+ */
+ void closeReader(IndexReader reader);
+
+ /**
+ * Initialize the reader provider before it is used.
+ */
+ void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor);
+
+ /**
+ * Called when a <code>SearchFactory</code> is destroyed. This method typically releases resources.
+ * It is guaranteed to be executed after readers are released by queries (assuming no user error).
+ */
+ void destroy();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
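
A sketch of the open/close contract described above (readerProvider and directoryProvider are hypothetical):

    IndexReader reader = readerProvider.openReader( directoryProvider );
    try {
        // run Lucene searches against the (possibly shared) reader
    }
    finally {
        readerProvider.closeReader( reader );   // a reader must always be closed once opened
    }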
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.hibernate.search.Environment;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.PluginLoader;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ReaderProviderFactory {
+
+ private static Properties getProperties(SearchConfiguration cfg) {
+ Properties props = cfg.getProperties();
+ Properties workerProperties = new Properties();
+ for (Map.Entry entry : props.entrySet()) {
+ String key = (String) entry.getKey();
+ if ( key.startsWith( Environment.READER_PREFIX ) ) {
+ workerProperties.setProperty( key, (String) entry.getValue() );
+ }
+ }
+ return workerProperties;
+ }
+
+ public static ReaderProvider createReaderProvider(SearchConfiguration cfg, SearchFactoryImplementor searchFactoryImplementor) {
+ Properties props = getProperties( cfg );
+ String impl = props.getProperty( Environment.READER_STRATEGY );
+ ReaderProvider readerProvider;
+ if ( StringHelper.isEmpty( impl ) ) {
+ //default: use the buffer-sharing implementation
+ readerProvider = new SharingBufferReaderProvider();
+ }
+ else if ( "not-shared".equalsIgnoreCase( impl ) ) {
+ readerProvider = new NotSharedReaderProvider();
+ }
+ else if ( "shared".equalsIgnoreCase( impl ) ) {
+ readerProvider = new SharingBufferReaderProvider();
+ }
+ //will remove next "else":
+ else if ( "shared-segments".equalsIgnoreCase( impl ) ) {
+ readerProvider = new SharingBufferReaderProvider();
+ }
+ else {
+ readerProvider = PluginLoader.instanceFromName( ReaderProvider.class, impl,
+ ReaderProviderFactory.class, "readerProvider" );
+ }
+ readerProvider.initialize( props, searchFactoryImplementor );
+ return readerProvider;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
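
ReaderProviderFactory above resolves the strategy from the reader properties: an empty value falls back to SharingBufferReaderProvider, "not-shared" and "shared" select the built-in implementations ("shared-segments" is a temporary alias slated for removal), and any other value is loaded as a custom ReaderProvider class name. A configuration sketch, assuming the usual hibernate.search.reader.strategy key (the exact constant is Environment.READER_STRATEGY):

    import java.util.Properties;

    public class ReaderStrategyConfigSketch {

        // Builds the Hibernate Search properties selecting a reader strategy.
        public static Properties readerStrategy(String strategy) {
            Properties props = new Properties();
            // "not-shared", "shared" (the default behaviour) or a fully qualified ReaderProvider class name
            props.setProperty( "hibernate.search.reader.strategy", strategy );
            return props;
        }

        public static void main(String[] args) {
            System.out.println( readerStrategy( "not-shared" ) );
        }
    }
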
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,151 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searchable;
+import org.hibernate.search.SearchException;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ReaderProviderHelper {
+
+ private static final Field subReadersField = getSubReadersField();
+
+ private static Field getSubReadersField() {
+ try {
+ Field field = MultiReader.class.getDeclaredField( "subReaders" );
+ if ( ! field.isAccessible() ) field.setAccessible( true );
+ return field;
+ }
+ catch (NoSuchFieldException e) {
+ throw new SearchException( "Incompatible version of Lucene: MultiReader.subReaders not available", e );
+ }
+ }
+
+ public static IndexReader[] getSubReadersFromMultiReader(MultiReader parentReader) {
+ try {
+ return (IndexReader[]) subReadersField.get( parentReader );
+ } catch (IllegalAccessException e) {
+ throw new SearchException( "Incompatible version of Lucene: MultiReader.subReaders not accessible", e );
+ }
+ }
+
+ @SuppressWarnings( { "ThrowableInstanceNeverThrown" } )
+ public static IndexReader buildMultiReader(int length, IndexReader[] readers) {
+ if ( length == 0 ) {
+ return null;
+ }
+ else if ( length == 1 ) {
+ //everything should be the same so wrap in a MultiReader
+ //return readers[0];
+ try {
+ return new CacheableMultiReader( readers );
+ }
+ catch (Exception e) {
+ //Lucene 2.2 used to throw IOExceptions here
+ clean( new SearchException( "Unable to open a MultiReader", e ), readers );
+ return null; //never happens, but please the compiler
+ }
+ }
+ else {
+ try {
+ return new CacheableMultiReader( readers );
+ }
+ catch (Exception e) {
+ //Lucene 2.2 used to throw IOExceptions here
+ clean( new SearchException( "Unable to open a MultiReader", e ), readers );
+ return null; //never happens, but please the compiler
+ }
+ }
+ }
+
+ public static void clean(SearchException e, IndexReader... readers) {
+ for (IndexReader reader : readers) {
+ if ( reader != null ) {
+ try {
+ reader.close();
+ }
+ catch (IOException ee) {
+ //swallow
+ }
+ }
+ }
+ throw e;
+ }
+
+ /**
+ * Find the underlying IndexReaders for the given searchable
+ *
+ * @param searchable The searchable to find the IndexReaders for
+ * @return A set of all base IndexReaders used within this searchable
+ */
+ public static Set<IndexReader> getIndexReaders(Searchable searchable) {
+ Set<IndexReader> readers = new HashSet<IndexReader>();
+ getIndexReadersInternal( readers, searchable );
+ return readers;
+ }
+
+ /**
+ * Find the underlying IndexReaders for the given reader
+ *
+ * @param reader The reader to find the IndexReaders for
+ * @return A set of all base IndexReaders used within this reader
+ */
+ public static Set<IndexReader> getIndexReaders(IndexReader reader) {
+ Set<IndexReader> readers = new HashSet<IndexReader>();
+ getIndexReadersInternal( readers, reader );
+ return readers;
+ }
+
+ /**
+ * Recursively identifies all underlying readers in any nested structure of Lucene Searchables or IndexReaders
+ *
+ * @param readers The working list of all readers found
+ * @param obj The object to find the readers within
+ */
+ private static void getIndexReadersInternal(Set<IndexReader> readers, Object obj) {
+ if ( obj instanceof MultiSearcher ) {
+ for (Searchable s : ( (MultiSearcher) obj ).getSearchables()) {
+ getIndexReadersInternal( readers, s );
+ }
+ }
+ else if ( obj instanceof IndexSearcher ) {
+ getIndexReadersInternal( readers, ( (IndexSearcher) obj ).getIndexReader() );
+ }
+ else if ( obj instanceof IndexReader ) {
+ readers.add( (IndexReader) obj );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/ReaderProviderHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
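
ReaderProviderHelper.getIndexReaders() above recursively unwraps MultiSearcher, IndexSearcher and IndexReader structures. A small sketch of what a caller sees for a plain IndexSearcher (the searcher is assumed to be already open over some index):

    import java.util.Set;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;

    import org.hibernate.search.reader.ReaderProviderHelper;

    public class IndexReaderExtractionSketch {

        // For a plain IndexSearcher the returned set contains exactly its underlying IndexReader.
        public static Set<IndexReader> underlyingReaders(IndexSearcher searcher) {
            return ReaderProviderHelper.getIndexReaders( searcher );
        }
    }
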
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharedReaderProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharedReaderProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharedReaderProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,392 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.slf4j.Logger;
+
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import static org.hibernate.search.reader.ReaderProviderHelper.buildMultiReader;
+import static org.hibernate.search.reader.ReaderProviderHelper.clean;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Share readers per <code>SearchFactory</code>, reusing them if they are still valid.
+ * This class contains several bugs including HSEARCH-211. Since it is deprecated we are not going to
+ * fix them right now.
+ *
+ * @deprecated replaced by SharingBufferReaderProvider
+ * @author Emmanuel Bernard
+ */
+@Deprecated
+public class SharedReaderProvider implements ReaderProvider {
+ private static final Logger log = LoggerFactory.make();
+
+ /**
+ * Non-fair lock. Needs to be acquired on IndexReader acquisition or release (it guards the semaphores).
+ */
+ private final Lock semaphoreIndexReaderLock = new ReentrantLock();
+ /**
+ * Non-fair locks used to block per DirectoryProvider only.
+ * A lock has to be acquired at least for IndexReader retrieval and switch,
+ * ie for all activeSearchIndexReaders manipulation.
+ * This map is read-only after initialization, so no synchronization is needed.
+ */
+ private Map<DirectoryProvider, Lock> perDirectoryProviderManipulationLocks;
+ /**
+ * Contains the active (ie non-obsolete) IndexReader for a given Directory.
+ * There may be no entry (warm up)
+ * <p/>
+ * protected by semaphoreIndexReaderLock
+ */
+ private Map<DirectoryProvider, IndexReader> activeSearchIndexReaders = new HashMap<DirectoryProvider, IndexReader>();
+ /**
+ * Contains the semaphore and the directory provider per opened IndexReader.
+ * All reads / updates have to be protected by semaphoreIndexReaderLock.
+ */
+ private Map<IndexReader, ReaderData> searchIndexReaderSemaphores = new HashMap<IndexReader, ReaderData>();
+
+ public IndexReader openReader(DirectoryProvider... directoryProviders) {
+ boolean trace = log.isTraceEnabled();
+ int length = directoryProviders.length;
+ IndexReader[] readers = new IndexReader[length];
+ if ( trace ) {
+ log.trace( "Opening IndexReader for directoryProviders: {}", length );
+ }
+
+ for ( int index = 0; index < length; index++ ) {
+ DirectoryProvider directoryProvider = directoryProviders[index];
+ IndexReader reader;
+ Lock directoryProviderLock = perDirectoryProviderManipulationLocks.get( directoryProvider );
+ if ( trace ) {
+ log.trace( "Opening IndexReader from {}", directoryProvider.getDirectory() );
+ }
+ directoryProviderLock.lock(); //needed for same problem as the double-checked locking
+ try {
+ reader = activeSearchIndexReaders.get( directoryProvider );
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ if ( reader == null ) {
+ if ( trace ) {
+ log.trace( "No shared IndexReader, opening a new one: {}", directoryProvider.getDirectory() );
+ }
+ reader = replaceActiveReader( null, directoryProviderLock, directoryProvider, readers );
+ }
+ else {
+ boolean isCurrent;
+ try {
+ isCurrent = reader.isCurrent();
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to read current status of Lucene IndexReader", e );
+ }
+ if ( !isCurrent ) {
+ if ( trace ) {
+ log.trace(
+ "Out of date shared IndexReader found, opening a new one: {}",
+ directoryProvider.getDirectory()
+ );
+ }
+ IndexReader outOfDateReader = reader;
+ reader = replaceActiveReader( outOfDateReader, directoryProviderLock, directoryProvider, readers );
+ }
+ else {
+ if ( trace ) {
+ log.trace( "Valid shared IndexReader: {}" + directoryProvider.getDirectory() );
+ }
+ directoryProviderLock.lock();
+ try {
+ //read the latest active one, the current one could be out of date and closed already
+ //the latest active is guaranteed to be active because it's protected by the dp lock
+ reader = activeSearchIndexReaders.get( directoryProvider );
+ semaphoreIndexReaderLock.lock();
+ try {
+ SharedReaderProvider.ReaderData readerData = searchIndexReaderSemaphores.get( reader );
+ //TODO if readerData is null????
+ readerData.semaphore++;
+ searchIndexReaderSemaphores.put( reader, readerData ); //not necessary
+ if ( trace ) {
+ log.trace( "Semaphore increased: {} for {}", readerData.semaphore, reader );
+ }
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ }
+ }
+ readers[index] = reader;
+ }
+ return buildMultiReader( length, readers );
+ }
+
+ @SuppressWarnings({ "ThrowableInstanceNeverThrown" })
+ private IndexReader replaceActiveReader(IndexReader outOfDateReader, Lock directoryProviderLock, DirectoryProvider directoryProvider, IndexReader[] readers) {
+ boolean trace = log.isTraceEnabled();
+ IndexReader oldReader;
+ boolean closeOldReader = false;
+ boolean closeOutOfDateReader = false;
+ IndexReader reader;
+ /**
+ * Since this runs outside of lock protection, multiple readers may be created in parallel;
+ * that is no worse than NotShared and it limits the locking time, hence better scalability.
+ */
+ try {
+ reader = IndexReader.open( directoryProvider.getDirectory(), true );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to open Lucene IndexReader", e );
+ }
+ directoryProviderLock.lock();
+ try {
+ //since this was not protected by a lock, other readers may have been added
+ oldReader = activeSearchIndexReaders.put( directoryProvider, reader );
+ semaphoreIndexReaderLock.lock();
+ try {
+ searchIndexReaderSemaphores.put( reader, new ReaderData( 1, directoryProvider ) );
+ if ( trace ) {
+ log.trace( "Semaphore: 1 for {}", reader );
+ }
+ if ( outOfDateReader != null ) {
+ ReaderData readerData = searchIndexReaderSemaphores.get( outOfDateReader );
+ if ( readerData == null ) {
+ closeOutOfDateReader = false; //already removed by another previous thread
+ }
+ else if ( readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( outOfDateReader );
+ closeOutOfDateReader = true;
+ }
+ else {
+ closeOutOfDateReader = false;
+ }
+ }
+
+ if ( oldReader != null && oldReader != outOfDateReader ) {
+ ReaderData readerData = searchIndexReaderSemaphores.get( oldReader );
+ if ( readerData == null ) {
+ log.warn( "Semaphore should not be null" );
+ closeOldReader = true; //TODO should be true or false?
+ }
+ else if ( readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( oldReader );
+ closeOldReader = true;
+ }
+ else {
+ closeOldReader = false;
+ }
+ }
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+ if ( closeOutOfDateReader ) {
+ if ( trace ) {
+ log.trace( "Closing out of date IndexReader {}", outOfDateReader );
+ }
+ try {
+ outOfDateReader.close();
+ }
+ catch ( IOException e ) {
+ clean( new SearchException( "Unable to close Lucene IndexReader", e ), readers );
+ }
+ }
+ if ( closeOldReader ) {
+ if ( trace ) {
+ log.trace( "Closing old IndexReader {}", oldReader );
+ }
+ try {
+ oldReader.close();
+ }
+ catch ( IOException e ) {
+ clean( new SearchException( "Unable to close Lucene IndexReader", e ), readers );
+ }
+ }
+ return reader;
+ }
+
+ public void closeReader(IndexReader reader) {
+ boolean trace = log.isTraceEnabled();
+ if ( reader == null ) {
+ return;
+ }
+ IndexReader[] readers;
+ //TODO should it be CacheableMultiReader? Probably no
+ if ( reader instanceof MultiReader ) {
+ readers = ReaderProviderHelper.getSubReadersFromMultiReader( ( MultiReader ) reader );
+ if ( trace ) {
+ log.trace( "Closing MultiReader: {}", reader );
+ }
+ }
+ else {
+ throw new AssertionFailure( "Everything should be wrapped in a MultiReader" );
+ }
+
+ for ( IndexReader subReader : readers ) {
+ closeInternalReader( trace, subReader, false );
+ }
+ }
+
+ private void closeInternalReader(boolean trace, IndexReader subReader, boolean finalClose) {
+ ReaderData readerData;
+ //TODO can we avoid that lock?
+ semaphoreIndexReaderLock.lock();
+ try {
+ readerData = searchIndexReaderSemaphores.get( subReader );
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+
+ if ( readerData == null ) {
+ log.error( "Trying to close a Lucene IndexReader not present: {}", subReader.directory() );
+ //TODO should we try to close?
+ return;
+ }
+
+ //acquire the locks in the same order as everywhere else
+ Lock directoryProviderLock = perDirectoryProviderManipulationLocks.get( readerData.provider );
+ boolean closeReader = false;
+ directoryProviderLock.lock();
+ try {
+ boolean isActive;
+ isActive = activeSearchIndexReaders.get( readerData.provider ) == subReader;
+ if ( trace ) {
+ log.trace( "Indexreader not active: {}", subReader );
+ }
+ semaphoreIndexReaderLock.lock();
+ try {
+ readerData = searchIndexReaderSemaphores.get( subReader );
+ if ( readerData == null ) {
+ log.error( "Trying to close a Lucene IndexReader not present: {}" + subReader.directory() );
+ //TODO should we try to close?
+ return;
+ }
+
+ //on a final close the semaphore should already be at 0, so only decrement for regular closes
+ if ( !finalClose ) {
+ readerData.semaphore--;
+ if ( trace ) {
+ log.trace( "Semaphore decreased to: {} for {}", readerData.semaphore, subReader );
+ }
+ }
+
+ if ( readerData.semaphore < 0 ) {
+ log.error( "Semaphore negative: {}", subReader.directory() );
+ }
+ if ( ( !isActive ) && readerData.semaphore == 0 ) {
+ searchIndexReaderSemaphores.remove( subReader );
+ closeReader = true;
+ }
+ else {
+ closeReader = false;
+ }
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+ }
+ finally {
+ directoryProviderLock.unlock();
+ }
+
+ if ( closeReader ) {
+ if ( trace ) {
+ log.trace( "Closing IndexReader: {}", subReader );
+ }
+ try {
+ subReader.close();
+ }
+ catch ( IOException e ) {
+ log.warn( "Unable to close Lucene IndexReader", e );
+ }
+ }
+ }
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ Set<DirectoryProvider<?>> providers = searchFactoryImplementor.getDirectoryProviders();
+ perDirectoryProviderManipulationLocks = new HashMap<DirectoryProvider, Lock>( providers.size() );
+ for ( DirectoryProvider dp : providers ) {
+ perDirectoryProviderManipulationLocks.put( dp, new ReentrantLock() );
+ }
+ perDirectoryProviderManipulationLocks = Collections.unmodifiableMap( perDirectoryProviderManipulationLocks );
+ }
+
+ public void destroy() {
+ boolean trace = log.isTraceEnabled();
+ IndexReader[] readers;
+ semaphoreIndexReaderLock.lock();
+ try {
+ //release active readers
+ activeSearchIndexReaders.clear();
+ readers = searchIndexReaderSemaphores.keySet()
+ .toArray( new IndexReader[searchIndexReaderSemaphores.size()] );
+ }
+ finally {
+ semaphoreIndexReaderLock.unlock();
+ }
+
+ for ( IndexReader reader : readers ) {
+ closeInternalReader( trace, reader, true );
+ }
+
+ if ( searchIndexReaderSemaphores.size() != 0 ) {
+ log.warn( "ReaderProvider contains readers not properly closed at destroy time" );
+ }
+
+ }
+
+ private static class ReaderData {
+
+ public ReaderData(int semaphore, DirectoryProvider provider) {
+ this.semaphore = semaphore;
+ this.provider = provider;
+ }
+
+ public int semaphore;
+ public DirectoryProvider provider;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharedReaderProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharingBufferReaderProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharingBufferReaderProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharingBufferReaderProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,293 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.reader;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.store.Directory;
+import org.slf4j.Logger;
+
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * This <code>ReaderProvider</code> shares IndexReaders as long as they are "current";
+ * the main difference from SharedReaderProvider is how the Readers are refreshed when needed:
+ * this implementation uses IndexReader.reopen(), which should improve performance on larger indexes
+ * as it shares buffers with the previous IndexReader generation for the segments which didn't change.
+ *
+ * @author Sanne Grinovero
+ */
+public class SharingBufferReaderProvider implements ReaderProvider {
+
+ private static final Logger log = LoggerFactory.make();
+
+ /**
+ * contains all Readers (most current per Directory and all unclosed old readers)
+ */
+ //TODO ConcurrentHashMap's constructor could benefit from some hints as arguments.
+ protected final Map<IndexReader, ReaderUsagePair> allReaders = new ConcurrentHashMap<IndexReader, ReaderUsagePair>();
+
+ /**
+ * contains the last updated Reader per Directory; protected by lockOnReplaceCurrent (in the values)
+ */
+ protected final Map<Directory, PerDirectoryLatestReader> currentReaders = new ConcurrentHashMap<Directory, PerDirectoryLatestReader>();
+
+ public void closeReader(IndexReader multiReader) {
+ if ( multiReader == null ) {
+ return;
+ }
+ IndexReader[] readers;
+ if ( multiReader instanceof MultiReader ) {
+ readers = ReaderProviderHelper.getSubReadersFromMultiReader( ( MultiReader ) multiReader );
+ }
+ else {
+ throw new AssertionFailure( "Everything should be wrapped in a MultiReader" );
+ }
+ log.debug( "Closing MultiReader: {}", multiReader );
+ for ( IndexReader reader : readers ) {
+ ReaderUsagePair container = allReaders.get( reader );
+ container.close(); //virtual
+ }
+ log.trace( "IndexReader closed." );
+ }
+
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ Set<DirectoryProvider<?>> providers = searchFactoryImplementor.getDirectoryProviders();
+
+ // create the readers for the known providers. Unfortunately, it is not possible to
+ // create all readers in initialize since some providers have more than one directory (eg
+ // FSSlaveDirectoryProvider). See also HSEARCH-250.
+ for ( DirectoryProvider provider : providers ) {
+ createReader( provider.getDirectory() );
+ }
+ }
+
+ /**
+ * Thread safe creation of <code>PerDirectoryLatestReader</code>.
+ *
+ * @param directory The Lucene directory for which to create the reader.
+ * @return either the cached instance for the specified <code>Directory</code> or a newly created one.
+ * @see <a href="http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-250">HSEARCH-250</a>
+ */
+ private synchronized PerDirectoryLatestReader createReader(Directory directory) {
+ PerDirectoryLatestReader reader = currentReaders.get( directory );
+ if ( reader != null ) {
+ return reader;
+ }
+
+ try {
+ reader = new PerDirectoryLatestReader( directory );
+ currentReaders.put( directory, reader );
+ return reader;
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to open Lucene IndexReader", e );
+ }
+ }
+
+ public void destroy() {
+ IndexReader[] readers = allReaders.keySet().toArray( new IndexReader[allReaders.size()] );
+ for ( IndexReader reader : readers ) {
+ ReaderUsagePair usage = allReaders.get( reader );
+ usage.close();
+ }
+
+ if ( allReaders.size() != 0 ) {
+ log.warn( "ReaderProvider contains readers not properly closed at destroy time" );
+ }
+ }
+
+ public IndexReader openReader(DirectoryProvider... directoryProviders) {
+ int length = directoryProviders.length;
+ IndexReader[] readers = new IndexReader[length];
+ log.debug( "Opening IndexReader for directoryProviders: {}", length );
+ for ( int index = 0; index < length; index++ ) {
+ Directory directory = directoryProviders[index].getDirectory();
+ log.trace( "Opening IndexReader from {}", directory );
+ PerDirectoryLatestReader directoryLatestReader = currentReaders.get( directory );
+ if ( directoryLatestReader == null ) { // might eg happen for FSSlaveDirectoryProvider
+ directoryLatestReader = createReader( directory );
+ }
+ readers[index] = directoryLatestReader.refreshAndGet();
+ }
+ // don't use ReaderProviderHelper.buildMultiReader as we need our own cleanup.
+ if ( length == 0 ) {
+ return null;
+ }
+ else {
+ try {
+ return new CacheableMultiReader( readers );
+ }
+ catch ( Exception e ) {
+ //Lucene 2.2 used to throw IOExceptions here
+ for ( IndexReader ir : readers ) {
+ ReaderUsagePair readerUsagePair = allReaders.get( ir );
+ readerUsagePair.close();
+ }
+ throw new SearchException( "Unable to open a MultiReader", e );
+ }
+ }
+ }
+
+ //overridable method for testability:
+ protected IndexReader readerFactory(final Directory directory) throws IOException {
+ return IndexReader.open( directory, true );
+ }
+
+ /**
+ * Container pairing an IndexReader with its usage counter.
+ */
+ protected final class ReaderUsagePair {
+
+ public final IndexReader reader;
+ /**
+ * When reaching 0 (always test on change) the reader should be really
+ * closed and then discarded.
+ * Starts at 2 because:
+ * the first usage token is artificial: it means "current" is not to be closed (+1);
+ * additionally the reader will be used right after creation (+1).
+ */
+ protected final AtomicInteger usageCounter = new AtomicInteger( 2 );
+
+ ReaderUsagePair(IndexReader r) {
+ reader = r;
+ }
+
+ /**
+ * Closes the <code>IndexReader</code> if no other resource is using it
+ * in which case the reference to this container will also be removed.
+ */
+ public void close() {
+ int refCount = usageCounter.decrementAndGet();
+ if ( refCount == 0 ) {
+ //TODO I've been experimenting with the idea of an async-close: didn't appear to have an interesting benefit,
+ //so discarded the code. should try with bigger indexes to see if the effect gets more impressive.
+ ReaderUsagePair removed = allReaders.remove( reader );//remove ourself
+ try {
+ reader.close();
+ }
+ catch ( IOException e ) {
+ log.warn( "Unable to close Lucene IndexReader", e );
+ }
+ assert removed != null;
+ }
+ else if ( refCount < 0 ) {
+ //doesn't happen with current code, could help spotting future bugs?
+ throw new AssertionFailure(
+ "Closing an IndexReader for which you didn't own a lock-token, or somebody else which didn't own closed already."
+ );
+ }
+ }
+
+ public String toString() {
+ return "Reader:" + this.hashCode() + " ref.count=" + usageCounter.get();
+ }
+
+ }
+
+ /**
+ * An instance for each DirectoryProvider,
+ * establishing the association between "current" ReaderUsagePair
+ * for a DirectoryProvider and its lock.
+ */
+ protected final class PerDirectoryLatestReader {
+
+ /**
+ * Reference to the most current IndexReader for a DirectoryProvider;
+ * guarded by lockOnReplaceCurrent;
+ */
+ public ReaderUsagePair current; //guarded by lockOnReplaceCurrent
+ private final Lock lockOnReplaceCurrent = new ReentrantLock();
+
+ /**
+ * @param directory The <code>Directory</code> for which we manage the <code>IndexReader</code>.
+ *
+ * @throws IOException when the index initialization fails.
+ */
+ public PerDirectoryLatestReader(Directory directory) throws IOException {
+ IndexReader reader = readerFactory( directory );
+ ReaderUsagePair initialPair = new ReaderUsagePair( reader );
+ initialPair.usageCounter.set( 1 );//a token to mark as active (preventing real close).
+ lockOnReplaceCurrent.lock();//no harm, just ensuring safe publishing.
+ current = initialPair;
+ lockOnReplaceCurrent.unlock();
+ allReaders.put( reader, initialPair );
+ }
+
+ /**
+ * Gets an updated IndexReader for the current Directory;
+ * the index status will be checked.
+ *
+ * @return the current IndexReader if it's in sync with underlying index, a new one otherwise.
+ */
+ public IndexReader refreshAndGet() {
+ ReaderUsagePair previousCurrent;
+ IndexReader updatedReader;
+ lockOnReplaceCurrent.lock();
+ try {
+ IndexReader beforeUpdateReader = current.reader;
+ try {
+ updatedReader = beforeUpdateReader.reopen();
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to reopen IndexReader", e );
+ }
+ if ( beforeUpdateReader == updatedReader ) {
+ previousCurrent = null;
+ current.usageCounter.incrementAndGet();
+ }
+ else {
+ ReaderUsagePair newPair = new ReaderUsagePair( updatedReader );
+ //no need to increment usageCounter in newPair, as it is constructed with correct number 2.
+ assert newPair.usageCounter.get() == 2;
+ previousCurrent = current;
+ current = newPair;
+ allReaders.put( updatedReader, newPair );//unfortunately still needs lock
+ }
+ }
+ finally {
+ lockOnReplaceCurrent.unlock();
+ }
+ // doesn't need lock:
+ if ( previousCurrent != null ) {
+ previousCurrent.close();// release a token as it's not the current any more.
+ }
+ return updatedReader;
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/reader/SharingBufferReaderProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
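
SharingBufferReaderProvider above builds on IndexReader.reopen(): reopen() returns the very same instance while the index is unchanged, and otherwise a new reader that shares unchanged segment buffers with the old generation. A plain-Lucene sketch of that underlying pattern (not the provider itself, which reference-counts old generations instead of closing them eagerly):

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;

    public class ReopenPatternSketch {

        // Returns an up-to-date reader, reusing the current one when the index did not change.
        public static IndexReader refresh(IndexReader current) throws IOException {
            IndexReader reopened = current.reopen();
            if ( reopened != current ) {
                // a new generation was created: in this single-owner sketch the old one can be closed right away
                current.close();
                return reopened;
            }
            return current;
        }
    }
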
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextManager.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextManager.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextManager.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,106 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.sandbox.standalone;
+
+import org.hibernate.search.*;
+
+import java.io.Serializable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface FullTextManager {
+
+ /**
+ * Returns the entity instance of a given type and id value
+ */
+ public <T> T get(Class<T> entityType, Serializable id);
+
+ /**
+ * Create a fulltext query on top of a native Lucene query returning the matching objects
+ * of type <code>entities</code> and their respective subclasses.
+ *
+ * @param luceneQuery The native Lucene query to be run against the Lucene index.
+ * @param entities List of classes for type filtering. The query result will only return entities of
+ * the specified types and their respective subtypes. If no class is specified no type filtering will take place.
+ *
+ * @return A <code>FullTextQuery</code> wrapping around the native Lucene query.
+ *
+ * @throws IllegalArgumentException if one of the <code>entities</code> is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class<?>... entities);
+
+ /**
+ * Force the (re)indexing of a given <b>managed</b> object.
+ * Indexing is batched per transaction: if a transaction is active, the operation
+ * will not affect the index at least until commit.
+ *
+ * @param entity The entity to index - must not be <code>null</code>.
+ *
+ * @throws IllegalArgumentException if entity is null or not an @Indexed entity
+ */
+ <T> void index(T entity);
+
+ /**
+ * @return the <code>SearchFactory</code> instance.
+ */
+ SearchFactory getSearchFactory();
+
+ /**
+ * Remove the entity with the type <code>entityType</code> and the identifier <code>id</code> from the index.
+ * If <code>id == null</code> all indexed entities of this type and its indexed subclasses are deleted. In this
+ * case this method behaves like {@link #purgeAll(Class)}.
+ *
+ * @param entityType The type of the entity to delete.
+ * @param id The id of the entity to delete.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purge(Class<T> entityType, Serializable id);
+
+ /**
+ * Remove all entities of a particular class and all its subclasses from the index.
+ *
+ * @param entityType The class of the entities to remove.
+ *
+ * @throws IllegalArgumentException if entityType is <code>null</code> or not a class or superclass annotated with <code>@Indexed</code>.
+ */
+ public <T> void purgeAll(Class<T> entityType);
+
+ /**
+ * Flush all index changes, forcing Hibernate Search to apply all pending changes to the index without waiting for the batch limit.
+ */
+ public void flushToIndexes();
+
+ //FIXME add support for mass indexer: does it even make sense?
+ /**
+ * Creates a MassIndexer to rebuild the indexes of some
+ * or all indexed entity types.
+ * Instances cannot be reused.
+ * @param types optionally restrict the operation to selected types
+ * @return a new MassIndexer for the given types
+ */
+ //public MassIndexer createIndexer(Class<?>... types);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextManager.java
___________________________________________________________________
Name: svn:keywords
+ Id
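
A usage sketch of the sandbox FullTextManager contract above. Book is a hypothetical @Indexed entity and the manager instance is assumed to be supplied by whatever bootstrap the sandbox eventually grows; createFullTextQuery() is still a stub in LuceneFullTextManager, so this only illustrates the intended calling pattern.

    import java.util.List;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermQuery;

    import org.hibernate.search.sandbox.standalone.FullTextManager;
    import org.hibernate.search.sandbox.standalone.FullTextQuery;

    public class FullTextManagerUsageSketch {

        /** Hypothetical indexed entity, for illustration only. */
        public static class Book {
        }

        // Queries by a single term on the hypothetical "title" field.
        public static List<?> findByTitle(FullTextManager manager, String title) {
            TermQuery luceneQuery = new TermQuery( new Term( "title", title ) );
            FullTextQuery query = manager.createFullTextQuery( luceneQuery, Book.class );
            return query.list();
        }

        // Queues an index operation and forces it to be applied immediately.
        public static void reindex(FullTextManager manager, Book book) {
            manager.index( book );
            manager.flushToIndexes();
        }
    }
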
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextQuery.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextQuery.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextQuery.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,34 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.sandbox.standalone;
+
+import java.util.List;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface FullTextQuery {
+ List<?> list();
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/FullTextQuery.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/InstanceTransactionContext.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/InstanceTransactionContext.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/InstanceTransactionContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,104 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.sandbox.standalone;
+
+import org.hibernate.search.backend.TransactionContext;
+
+import javax.transaction.Synchronization;
+import javax.transaction.Status;
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * Transaction context that provides transaction boundary methods.
+ * While not truly "transactional", it allows Synchronization elements to be called.
+ *
+ * @author Emmanuel Bernard
+ */
+public class InstanceTransactionContext implements TransactionContext {
+ private State transactionState = State.NO_TRANSACTION;
+ private final List<Synchronization> synchronizations = new ArrayList<Synchronization>(5);
+
+ public void beginTransaction() {
+ if (transactionState != State.NO_TRANSACTION) {
+ throw new IllegalStateException( "Transaction context already started: " + transactionState);
+ }
+ transactionState = State.IN_TRANSACTION;
+ }
+
+ public void commit() {
+ if ( transactionState != State.IN_TRANSACTION ) {
+ throw new IllegalStateException( "Transaction context not in active state: " + transactionState);
+ }
+ try {
+ for (Synchronization sync : synchronizations) {
+ sync.beforeCompletion();
+ }
+ for (Synchronization sync : synchronizations) {
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+ finally {
+ synchronizations.clear();
+ transactionState = State.TRANSACTION_CLOSED;
+ }
+ }
+
+ public void rollback() {
+ if ( transactionState != State.IN_TRANSACTION ) {
+ throw new IllegalStateException( "Transaction context not in active state: " + transactionState);
+ }
+ try {
+ for (Synchronization sync : synchronizations) {
+ sync.beforeCompletion();
+ }
+ for (Synchronization sync : synchronizations) {
+ sync.afterCompletion( Status.STATUS_ROLLEDBACK );
+ }
+ }
+ finally {
+ synchronizations.clear();
+ transactionState = State.TRANSACTION_CLOSED;
+ }
+ }
+
+ public boolean isTransactionInProgress() {
+ return transactionState == State.IN_TRANSACTION;
+ }
+
+ public Object getTransactionIdentifier() {
+ return this;
+ }
+
+ public void registerSynchronization(Synchronization synchronization) {
+ synchronizations.add( synchronization );
+ }
+
+ private static enum State {
+ NO_TRANSACTION,
+ IN_TRANSACTION,
+ TRANSACTION_CLOSED
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/InstanceTransactionContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
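
A small sketch of the lifecycle enforced by InstanceTransactionContext above: begin once, register synchronizations (normally done by the Worker), then commit or rollback exactly once; a second boundary call raises IllegalStateException. The Synchronization below is a trivial hand-written one for illustration.

    import javax.transaction.Status;
    import javax.transaction.Synchronization;

    import org.hibernate.search.sandbox.standalone.InstanceTransactionContext;

    public class InstanceTransactionContextSketch {

        public static void main(String[] args) {
            InstanceTransactionContext context = new InstanceTransactionContext();
            context.beginTransaction();
            // in real use the Worker registers its own synchronization; this one only prints
            context.registerSynchronization( new Synchronization() {
                public void beforeCompletion() {
                    System.out.println( "before completion" );
                }
                public void afterCompletion(int status) {
                    System.out.println( "committed: " + ( status == Status.STATUS_COMMITTED ) );
                }
            } );
            context.commit(); // a second commit() or rollback() would throw IllegalStateException
        }
    }
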
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/LuceneFullTextManager.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/LuceneFullTextManager.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/LuceneFullTextManager.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,159 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.sandbox.standalone;
+
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+import java.io.Serializable;
+import java.util.Set;
+import java.util.List;
+
+/**
+ * Implements a standalone full text service.
+ * Data is stored in Lucene
+ *
+ * @author Emmanuel Bernard
+ */
+public class LuceneFullTextManager implements FullTextManager {
+ private final SearchFactoryImplementor searchFactory;
+ private final InstanceTransactionContext transactionContext;
+
+ LuceneFullTextManager(SearchFactoryImplementor sfi) {
+ this.searchFactory = sfi;
+ this.transactionContext = new InstanceTransactionContext();
+ transactionContext.beginTransaction();
+ }
+
+ public <T> T get(Class<T> entityType, Serializable id) {
+ final DocumentBuilderIndexedEntity<?> docBuilder = searchFactory.getDocumentBuilderIndexedEntity( entityType );
+ if ( docBuilder == null ) {
+ String msg = "Entity to retrueve is not an @Indexed entity: " + entityType.getClass().getName();
+ throw new IllegalArgumentException( msg );
+ }
+ if (id == null) {
+ throw new IllegalArgumentException( "Identifier cannot be null" );
+ }
+ Query luceneQuery = new TermQuery( docBuilder.getTerm( id ) );
+ FullTextQuery searchQuery = createFullTextQuery( luceneQuery, entityType );
+ List<?> results = searchQuery.list();
+ if (results.size() > 1) {
+ //TODO find correct exception
+ throw new SearchException("Several entities with he same id found: " + entityType + "#" + id);
+ }
+ @SuppressWarnings( "unchecked" )
+ final T result = (T) ( results.size() == 0 ? null : results.get( 0 ) );
+ return result;
+ }
+
+ public FullTextQuery createFullTextQuery(Query luceneQuery, Class<?>... entities) {
+ return null; //TODO not implemented yet in this sandbox prototype
+ }
+
+
+
+ /**
+ * (Re-)index an entity.
+ * Non @Indexed entities are rejected with an IllegalArgumentException rather than silently ignored.
+ *
+ * @param entity The entity to index - must not be <code>null</code>.
+ *
+ * @throws IllegalArgumentException if entity is null or not an @Indexed entity
+ */
+ public <T> void index(T entity) {
+ if ( entity == null ) {
+ throw new IllegalArgumentException( "Entity to index should not be null" );
+ }
+
+ Class<?> clazz = getClass( entity );
+ //TODO cache that at the FTSession level
+ //not strictly necessary but a small optimization
+ final DocumentBuilderIndexedEntity<?> docBuilder = searchFactory.getDocumentBuilderIndexedEntity( clazz );
+ if ( docBuilder == null ) {
+ String msg = "Entity to index is not an @Indexed entity: " + entity.getClass().getName();
+ throw new IllegalArgumentException( msg );
+ }
+ Serializable id = docBuilder.getId( entity );
+ Work<T> work = new Work<T>( entity, id, WorkType.INDEX );
+ searchFactory.getWorker().performWork( work, transactionContext );
+
+ //TODO
+ //need to add elements in a queue kept at the Session level
+ //the queue will be processed by a Lucene(Auto)FlushEventListener
+ //note that we could keep this queue somewhere in the event listener in the mean time but that requires
+ //a synchronized hashmap holding this queue on a per session basis plus some session house keeping (yuk)
+ //another solution would be to subclass SessionImpl instead of having this LuceneSession delegation model
+ //this is an open discussion
+ }
+
+ private Class<?> getClass(Object entity) {
+ return entity.getClass();
+ }
+
+ public SearchFactory getSearchFactory() {
+ return searchFactory;
+ }
+
+ public <T> void purge(Class<T> entityType, Serializable id) {
+ if ( entityType == null ) {
+ return;
+ }
+
+ Set<Class<?>> targetedClasses = searchFactory.getIndexedTypesPolymorphic( new Class[] {entityType} );
+ if ( targetedClasses.isEmpty() ) {
+ String msg = entityType.getName() + " is not an indexed entity or a subclass of an indexed entity";
+ throw new IllegalArgumentException( msg );
+ }
+
+ for ( Class<?> clazz : targetedClasses ) {
+ if ( id == null ) {
+ createAndPerformWork( clazz, null, WorkType.PURGE_ALL );
+ }
+ else {
+ createAndPerformWork( clazz, id, WorkType.PURGE );
+ }
+ }
+ }
+
+ private <T> void createAndPerformWork(Class<T> clazz, Serializable id, WorkType workType) {
+ Work<T> work;
+ work = new Work<T>( clazz, id, workType );
+ searchFactory.getWorker().performWork( work, transactionContext );
+ }
+
+ public <T> void purgeAll(Class<T> entityType) {
+ purge( entityType, null );
+ }
+
+ public void flushToIndexes() {
+ searchFactory.getWorker().flushWorks( transactionContext );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/sandbox/standalone/LuceneFullTextManager.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.util.Properties;
+
+import org.apache.lucene.store.Directory;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+
+/**
+ * Set up and provide a Lucene <code>Directory</code>.
+ * <code>equals()</code> and <code>hashCode()</code> must guarantee equality
+ * between two providers pointing to the same underlying Lucene store.
+ * Besides that, hashCode ordering is used to avoid deadlock when locking a directory provider.
+ *
+ * This class must be thread-safe regarding <code>getDirectory()</code> calls.
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public interface DirectoryProvider<TDirectory extends Directory> {
+ /**
+ * Get the information needed to initialize the directory and to build its hashCode/equals methods.
+ */
+ void initialize(String directoryProviderName, Properties properties, SearchFactoryImplementor searchFactoryImplementor);
+
+ /**
+ * Executed after initialize(), this method sets up the heavy process of starting up the DirectoryProvider.
+ * IO processing as well as background processing are expected to be set up here.
+ *
+ */
+ void start();
+
+ /**
+ * Executed when the search factory is closed. This method should stop any background process as well as
+ * release any resources.
+ * It should avoid raising exceptions and log potential errors instead.
+ */
+ void stop();
+
+ /**
+ * Returns an initialized Lucene Directory. This method call <b>must</b> be thread-safe.
+ */
+ TDirectory getDirectory();
+}
+
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
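
A minimal in-memory sketch of the DirectoryProvider contract above, illustrative only and not the built-in RAMDirectoryProvider: initialize() keeps only what equals()/hashCode() need, start() does the heavy setup, getDirectory() stays thread-safe through a volatile field, and stop() logs instead of throwing.

    import java.util.Properties;

    import org.apache.lucene.store.RAMDirectory;

    import org.hibernate.search.engine.SearchFactoryImplementor;
    import org.hibernate.search.store.DirectoryProvider;

    public class InMemoryDirectoryProviderSketch implements DirectoryProvider<RAMDirectory> {

        private String indexName;
        private volatile RAMDirectory directory;

        public void initialize(String directoryProviderName, Properties properties,
                               SearchFactoryImplementor searchFactoryImplementor) {
            // only keep what equals()/hashCode() need to identify the underlying store
            this.indexName = directoryProviderName;
        }

        public void start() {
            // heavy setup belongs here, after initialize()
            this.directory = new RAMDirectory();
        }

        public RAMDirectory getDirectory() {
            return directory; // volatile read keeps this call thread-safe
        }

        public void stop() {
            try {
                if ( directory != null ) {
                    directory.close();
                }
            }
            catch (Exception e) {
                // the contract asks implementations to log rather than propagate on shutdown
                System.err.println( "Could not close RAMDirectory for " + indexName + ": " + e );
            }
        }

        public boolean equals(Object other) {
            if ( this == other ) return true;
            if ( !( other instanceof InMemoryDirectoryProviderSketch ) ) return false;
            return indexName.equals( ( (InMemoryDirectoryProviderSketch) other ).indexName );
        }

        public int hashCode() {
            return indexName == null ? 0 : indexName.hashCode();
        }
    }
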
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,275 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.search.Environment;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.backend.LuceneIndexingParameters;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.search.backend.configuration.MaskedProperty;
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.optimization.IncrementalOptimizerStrategy;
+import org.hibernate.search.store.optimization.NoOpOptimizerStrategy;
+import org.hibernate.search.store.optimization.OptimizerStrategy;
+import org.hibernate.search.util.PluginLoader;
+import org.hibernate.util.StringHelper;
+
+/**
+ * Create a Lucene directory provider which can be configured
+ * through the following properties:
+ * <ul>
+ * <li><i>hibernate.search.default.*</i></li>
+ * <li><i>hibernate.search.&lt;indexname&gt;.*</i>,</li>
+ * </ul>where <i>&lt;indexname&gt;</i> properties have precedence over default ones.
+ * <p/>
+ * The implementation is described by
+ * <i>hibernate.search.[default|indexname].directory_provider</i>.
+ * If none is defined the default is FSDirectoryProvider.
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+public class DirectoryProviderFactory {
+
+ private final List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>();
+
+ private static final String SHARDING_STRATEGY = "sharding_strategy";
+ private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
+
+ public DirectoryProviders createDirectoryProviders(XClass entity, SearchConfiguration cfg,
+ SearchFactoryImplementor searchFactoryImplementor,
+ ReflectionManager reflectionManager) {
+ //get properties
+ String directoryProviderName = getDirectoryProviderName( entity, cfg );
+ Properties[] indexProps = getDirectoryProperties( cfg, directoryProviderName );
+
+ //set up the directories
+ int nbrOfProviders = indexProps.length;
+ DirectoryProvider[] providers = new DirectoryProvider[nbrOfProviders];
+ for (int index = 0; index < nbrOfProviders; index++) {
+ String providerName = nbrOfProviders > 1 ?
+ directoryProviderName + "." + index :
+ directoryProviderName;
+ providers[index] = createDirectoryProvider( providerName, indexProps[index],
+ reflectionManager.toClass( entity ), searchFactoryImplementor );
+ }
+
+ //define sharding strategy
+ IndexShardingStrategy shardingStrategy;
+ //any indexProperty will do, the indexProps[0] surely exists.
+ String shardingStrategyName = indexProps[0].getProperty( SHARDING_STRATEGY );
+ if ( shardingStrategyName == null ) {
+ if ( indexProps.length == 1 ) {
+ shardingStrategy = new NotShardedStrategy();
+ }
+ else {
+ shardingStrategy = new IdHashShardingStrategy();
+ }
+ }
+ else {
+ shardingStrategy = PluginLoader.instanceFromName( IndexShardingStrategy.class,
+ shardingStrategyName, DirectoryProviderFactory.class, "IndexShardingStrategy" );
+ }
+ shardingStrategy.initialize(
+ new MaskedProperty( indexProps[0], SHARDING_STRATEGY ), providers );
+ return new DirectoryProviders( shardingStrategy, providers );
+ }
+
+ public void startDirectoryProviders() {
+ for (DirectoryProvider provider : providers) {
+ provider.start();
+ }
+ }
+
+ private DirectoryProvider<?> createDirectoryProvider(String directoryProviderName, Properties indexProps,
+ Class entity, SearchFactoryImplementor searchFactoryImplementor) {
+ String className = indexProps.getProperty( "directory_provider" );
+ DirectoryProvider<?> provider;
+ if ( StringHelper.isEmpty( className ) ) {
+ provider = new FSDirectoryProvider();
+ }
+ else {
+ provider = PluginLoader.instanceFromName( DirectoryProvider.class, className,
+ DirectoryProviderFactory.class, "directory provider" );
+ }
+ try {
+ provider.initialize( directoryProviderName, indexProps, searchFactoryImplementor );
+ }
+ catch (Exception e) {
+ throw new SearchException( "Unable to initialize directory provider: " + directoryProviderName, e );
+ }
+ int index = providers.indexOf( provider );
+ boolean exclusiveIndexUsage = isExclusiveIndexUsageEnabled( directoryProviderName, indexProps );
+ if ( index != -1 ) {
+ //share the same Directory provider for the same underlying store
+ final DirectoryProvider<?> directoryProvider = providers.get( index );
+ searchFactoryImplementor.addClassToDirectoryProvider( entity, directoryProvider, exclusiveIndexUsage);
+ return directoryProvider;
+ }
+ else {
+ configureOptimizerStrategy( searchFactoryImplementor, indexProps, provider );
+ configureIndexingParameters( searchFactoryImplementor, indexProps, provider );
+ providers.add( provider );
+ searchFactoryImplementor.addClassToDirectoryProvider( entity, provider, exclusiveIndexUsage );
+ return provider;
+ }
+ }
+
+ private void configureOptimizerStrategy(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider) {
+ boolean incremental = indexProps.containsKey( "optimizer.operation_limit.max" )
+ || indexProps.containsKey( "optimizer.transaction_limit.max" );
+ OptimizerStrategy optimizerStrategy;
+ if ( incremental ) {
+ optimizerStrategy = new IncrementalOptimizerStrategy();
+ optimizerStrategy.initialize( provider, indexProps, searchFactoryImplementor );
+ }
+ else {
+ optimizerStrategy = new NoOpOptimizerStrategy();
+ }
+ searchFactoryImplementor.addOptimizerStrategy( provider, optimizerStrategy );
+ }
+
+ /**
+ * Creates a new <code>LuceneIndexingParameters</code> instance for the specified provider.
+ * If there are no matching properties in the configuration, default values are applied.
+ * <p>
+ * NOTE:<br/>
+ * If a non-batch value is set in the configuration, it is also applied to the
+ * batch mode. This covers the case where users only specify
+ * parameters for the non-batch mode; in that case the same parameters apply to
+ * batch indexing. Parameters are found "depth-first": a batch parameter set
+ * in the global scope takes priority over local transaction parameters.
+ * </p>
+ *
+ * @param searchFactoryImplementor the search factory.
+ * @param directoryProperties The properties extracted from the configuration.
+ * @param provider The directory provider for which to configure the indexing parameters.
+ */
+ private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor,
+ Properties directoryProperties, DirectoryProvider<?> provider) {
+ LuceneIndexingParameters indexingParams = new LuceneIndexingParameters( directoryProperties );
+ searchFactoryImplementor.addIndexingParameters( provider, indexingParams );
+ }
+
+ /**
+ * Returns an array of directory properties.
+ * Properties are defaulted. For a given property name,
+ * hibernate.search.indexname.n has priority over hibernate.search.indexname, which has priority over hibernate.search.default.
+ * If the index is not sharded, a single Properties is returned.
+ * If the index is sharded, the Properties index matches the shard index.
+ */
+ private static Properties[] getDirectoryProperties(SearchConfiguration cfg, String directoryProviderName) {
+ Properties rootCfg = new MaskedProperty( cfg.getProperties(), "hibernate.search" );
+ Properties globalProperties = new MaskedProperty( rootCfg, "default" );
+ Properties directoryLocalProperties = new MaskedProperty( rootCfg, directoryProviderName, globalProperties );
+ final String shardsCountValue = directoryLocalProperties.getProperty( NBR_OF_SHARDS );
+ if ( shardsCountValue == null ) {
+ // no shards: finished.
+ return new Properties[] { directoryLocalProperties };
+ }
+ else {
+ // count shards
+ int shardsCount = ConfigurationParseHelper.parseInt( shardsCountValue, shardsCountValue + " is not a number" );
+ // create shard-specific Props
+ Properties[] shardLocalProperties = new Properties[shardsCount];
+ for (int i = 0; i < shardsCount; i++) {
+ shardLocalProperties[i] = new MaskedProperty(
+ directoryLocalProperties, Integer.toString( i ), directoryLocalProperties );
+ }
+ return shardLocalProperties;
+ }
+ }
+
+ private static String getDirectoryProviderName(XClass clazz, SearchConfiguration cfg) {
+ ReflectionManager reflectionManager = cfg.getReflectionManager();
+ if ( reflectionManager == null ) {
+ reflectionManager = new JavaReflectionManager();
+ }
+ //get the most specialized (ie subclass > superclass) non default index name
+ //if none extract the name from the most generic (superclass > subclass) @Indexed class in the hierarchy
+ //FIXME I'm inclined to get rid of the default value
+ Class aClass = cfg.getClassMapping( clazz.getName() );
+ XClass rootIndex = null;
+ do {
+ XClass currentClazz = reflectionManager.toXClass( aClass );
+ Indexed indexAnn = currentClazz.getAnnotation( Indexed.class );
+ if ( indexAnn != null ) {
+ if ( indexAnn.index().length() != 0 ) {
+ return indexAnn.index();
+ }
+ else {
+ rootIndex = currentClazz;
+ }
+ }
+ aClass = aClass.getSuperclass();
+ }
+ while ( aClass != null );
+ //there is nobody out there with a non default @Indexed.index
+ if ( rootIndex != null ) {
+ return rootIndex.getName();
+ }
+ else {
+ throw new SearchException(
+ "Trying to extract the index name from a non @Indexed class: " + clazz.getName() );
+ }
+ }
+
+ public static class DirectoryProviders {
+ private final IndexShardingStrategy shardingStrategy;
+ private final DirectoryProvider[] providers;
+
+ public DirectoryProviders(IndexShardingStrategy shardingStrategy, DirectoryProvider[] providers) {
+ this.shardingStrategy = shardingStrategy;
+ this.providers = providers;
+ }
+
+ public IndexShardingStrategy getSelectionStrategy() {
+ return shardingStrategy;
+ }
+
+ public DirectoryProvider[] getProviders() {
+ return providers;
+ }
+ }
+
+ private static boolean isExclusiveIndexUsageEnabled(String directoryProviderName, Properties indexProps) {
+ String exclusiveIndexUsageProperty = indexProps.getProperty( Environment.EXCLUSIVE_INDEX_USE, "false" );
+ boolean exclusiveIndexUsage = ConfigurationParseHelper.parseBoolean( exclusiveIndexUsageProperty,
+ "Illegal value for property " + Environment.EXCLUSIVE_INDEX_USE + " on index " + directoryProviderName );
+ return exclusiveIndexUsage;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
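For reference, the property layout that createDirectoryProviders() and getDirectoryProperties() resolve can be sketched with plain JDK Properties. The index name com.acme.Book, the paths and the shard count below are hypothetical; the key names (directory_provider, indexBase, sharding_strategy.nbr_of_shards, and the default/index/shard prefixes) come from the class above.

import java.util.Properties;

public class ShardingConfigSketch {
    public static Properties example() {
        Properties p = new Properties();
        // defaults shared by every index (masked under "hibernate.search", then "default")
        p.setProperty( "hibernate.search.default.directory_provider",
                "org.hibernate.search.store.FSDirectoryProvider" );
        p.setProperty( "hibernate.search.default.indexBase", "/var/lucene/indexes" );
        // index-specific keys take precedence over the defaults
        p.setProperty( "hibernate.search.com.acme.Book.sharding_strategy.nbr_of_shards", "2" );
        // shard-specific keys (suffix ".0", ".1", ...) take precedence over the index-specific ones
        p.setProperty( "hibernate.search.com.acme.Book.1.indexName", "Book.shard1" );
        return p;
    }
}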
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,259 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.NativeFSLockFactory;
+import org.apache.lucene.store.NoLockFactory;
+import org.apache.lucene.store.SimpleFSLockFactory;
+import org.apache.lucene.store.SingleInstanceLockFactory;
+import org.slf4j.Logger;
+
+import org.hibernate.util.StringHelper;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.util.PluginLoader;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class DirectoryProviderHelper {
+
+ private static final Logger log = LoggerFactory.make();
+ private static final String ROOTINDEX_PROP_NAME = "sourceBase";
+ private static final String RELATIVEINDEX_PROP_NAME = "source";
+ public static final String COPYBUFFERSIZE_PROP_NAME = "buffer_size_on_copy";
+
+ /**
+ * Build a directory name out of a root and relative path, guessing the significant part
+ * and checking for file availability.
+ * @param directoryProviderName
+ * @param properties
+ * @param needWritePermissions when true the directory will be tested for read-write permissions.
+ * @return The file representing the source directory
+ */
+ public static File getSourceDirectory( String directoryProviderName, Properties properties, boolean needWritePermissions ) {
+ String root = properties.getProperty( ROOTINDEX_PROP_NAME );
+ String relative = properties.getProperty( RELATIVEINDEX_PROP_NAME );
+ File sourceDirectory;
+ if ( log.isTraceEnabled() ) {
+ log.trace(
+ "Guess source directory from {} {} and {} {}", new Object[] {
+ ROOTINDEX_PROP_NAME,
+ ( root != null ? root : "<null>" ),
+ RELATIVEINDEX_PROP_NAME,
+ (relative != null ? relative : "<null>") }
+ );
+ }
+ if ( relative == null ) relative = directoryProviderName;
+ if ( StringHelper.isEmpty( root ) ) {
+ log.debug( "No root directory, go with relative " + relative );
+ sourceDirectory = new File( relative );
+ if ( ! sourceDirectory.isDirectory() ) { // this also tests for existence
+ throw new SearchException( "Unable to read source directory: " + relative );
+ }
+ //else keep source as is
+ }
+ else {
+ File rootDir = new File( root );
+ makeSanityCheckedDirectory( rootDir, directoryProviderName, needWritePermissions );
+ sourceDirectory = new File( root, relative );
+ makeSanityCheckedDirectory( sourceDirectory, directoryProviderName, needWritePermissions );
+ log.debug( "Got directory from root + relative" );
+ }
+ return sourceDirectory;
+ }
+
+ /**
+ * Creates an FSDirectory in the provided directory and initializes
+ * an index if one does not already exist.
+ * @param indexDir The directory where to write a new index
+ * @return the created FSDirectory
+ * @throws IOException
+ */
+ public static FSDirectory createFSIndex(File indexDir, Properties dirConfiguration) throws IOException {
+ LockFactory lockFactory = createLockFactory(indexDir, dirConfiguration);
+ FSDirectory fsDirectory = FSDirectory.open( indexDir, null );
+ // must use the setter (instead of using the constructor) to set the lockFactory, or Lucene will
+ // throw an exception if it's different than a previous setting.
+ fsDirectory.setLockFactory( lockFactory );
+ if ( ! IndexReader.indexExists( fsDirectory ) ) {
+ log.debug( "Initialize index: '{}'", indexDir.getAbsolutePath() );
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( fsDirectory, new SimpleAnalyzer(), true, fieldLength );
+ iw.close();
+ }
+ return fsDirectory;
+ }
+
+ /**
+ * Creates a LockFactory as selected in the configuration for the
+ * DirectoryProvider.
+ * The SimpleFSLockFactory and NativeFSLockFactory need a File to know
+ * where to store the filesystem based locks; other implementations
+ * ignore this parameter.
+ * @param indexDir the directory to use to store locks, if needed by implementation
+ * @param dirConfiguration the configuration of current DirectoryProvider
+ * @return the LockFactory as configured, or a SimpleFSLockFactory
+ * in case of configuration errors or as a default.
+ * @throws IOException
+ */
+ public static LockFactory createLockFactory(File indexDir, Properties dirConfiguration) {
+ //For FS-based indexes default to "simple", default to "single" otherwise.
+ String defaultStrategy = indexDir==null ? "single" : "simple";
+ String lockFactoryName = dirConfiguration.getProperty( "locking_strategy", defaultStrategy );
+ if ( "simple".equals( lockFactoryName ) ) {
+ if ( indexDir==null ) {
+ throw new SearchException( "To use \"simple\" as a LockFactory strategy an indexBase path must be set");
+ }
+ try {
+ return new SimpleFSLockFactory( indexDir );
+ } catch (IOException e) {
+ throw new SearchException( "Could not initialize SimpleFSLockFactory", e);
+ }
+ }
+ else if ( "native".equals( lockFactoryName ) ) {
+ if ( indexDir==null ) {
+ throw new SearchException( "To use \"native\" as a LockFactory strategy an indexBase path must be set");
+ }
+ try {
+ return new NativeFSLockFactory( indexDir );
+ } catch (IOException e) {
+ throw new SearchException( "Could not initialize NativeFSLockFactory", e);
+ }
+ }
+ else if ( "single".equals( lockFactoryName ) ) {
+ return new SingleInstanceLockFactory();
+ }
+ else if ( "none".equals( lockFactoryName ) ) {
+ return new NoLockFactory();
+ }
+ else {
+ LockFactoryFactory lockFactoryFactory = PluginLoader.instanceFromName( LockFactoryFactory.class,
+ lockFactoryName, DirectoryProviderHelper.class, "locking_strategy" );
+ return lockFactoryFactory.createLockFactory( indexDir, dirConfiguration );
+ }
+ }
+
+ /**
+ * Verifies that the index directory exists and is writable,
+ * or creates it if it does not exist.
+ * @param annotatedIndexName The index name declared on the @Indexed annotation
+ * @param properties The properties may override the indexname.
+ * @param verifyIsWritable Verify the directory is writable
+ * @return the File representing the Index Directory
+ * @throws SearchException
+ */
+ public static File getVerifiedIndexDir(String annotatedIndexName, Properties properties, boolean verifyIsWritable) {
+ String indexBase = properties.getProperty( "indexBase", "." );
+ String indexName = properties.getProperty( "indexName", annotatedIndexName );
+ File baseIndexDir = new File( indexBase );
+ makeSanityCheckedDirectory( baseIndexDir, indexName, verifyIsWritable );
+ File indexDir = new File( baseIndexDir, indexName );
+ makeSanityCheckedDirectory( indexDir, indexName, verifyIsWritable );
+ return indexDir;
+ }
+
+ /**
+ * @param directory The directory to create or verify
+ * @param indexName To label exceptions
+ * @param verifyIsWritable Verify the directory is writable
+ * @throws SearchException
+ */
+ private static void makeSanityCheckedDirectory(File directory, String indexName, boolean verifyIsWritable) {
+ if ( ! directory.exists() ) {
+ log.warn( "Index directory not found, creating: '" + directory.getAbsolutePath() + "'" );
+ //if not existing, create the full path
+ if ( ! directory.mkdirs() ) {
+ throw new SearchException( "Unable to create index directory: "
+ + directory.getAbsolutePath() + " for index "
+ + indexName );
+ }
+ }
+ else {
+ // else check it is not a file
+ if ( ! directory.isDirectory() ) {
+ throw new SearchException( "Unable to initialize index: "
+ + indexName + ": "
+ + directory.getAbsolutePath() + " is a file." );
+ }
+ }
+ // and ensure it's writable
+ if ( verifyIsWritable && ( ! directory.canWrite() ) ) {
+ throw new SearchException( "Cannot write into index directory: "
+ + directory.getAbsolutePath() + " for index "
+ + indexName );
+ }
+ }
+
+ static long getRefreshPeriod(Properties properties, String directoryProviderName) {
+ String refreshPeriod = properties.getProperty( "refresh", "3600" );
+ long period;
+ try {
+ period = Long.parseLong( refreshPeriod );
+ } catch (NumberFormatException nfe) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName +"; refresh period is not numeric.", nfe );
+ }
+ log.debug( "Refresh period: {} seconds", period );
+ return period * 1000; //convert from seconds to milliseconds
+ }
+
+ /**
+ * Users may configure the number of MB to use as
+ * "chunk size" for large file copy operations performed
+ * by DirectoryProviders.
+ * @param directoryProviderName
+ * @param properties
+ * @return the number of Bytes to use as "chunk size" in file copy operations.
+ */
+ public static long getCopyBufferSize(String directoryProviderName, Properties properties) {
+ String value = properties.getProperty( COPYBUFFERSIZE_PROP_NAME );
+ long size = FileHelper.DEFAULT_COPY_BUFFER_SIZE;
+ if ( value != null ) {
+ try {
+ size = Long.parseLong( value ) * 1024 * 1024; //from MB to B.
+ } catch (NumberFormatException nfe) {
+ throw new SearchException( "Unable to initialize index " +
+ directoryProviderName +"; "+ COPYBUFFERSIZE_PROP_NAME + " is not numeric.", nfe );
+ }
+ if ( size <= 0 ) {
+ throw new SearchException( "Unable to initialize index " +
+ directoryProviderName +"; "+ COPYBUFFERSIZE_PROP_NAME + " needs to be greater than zero.");
+ }
+ }
+ return size;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/DirectoryProviderHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
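A minimal sketch of the per-index keys read by DirectoryProviderHelper (locking_strategy, buffer_size_on_copy, refresh). The path and values are hypothetical, and the keys are the masked names, i.e. without the hibernate.search.<index> prefix that DirectoryProviderFactory strips before handing the Properties over.

import java.io.File;
import java.util.Properties;

import org.apache.lucene.store.LockFactory;
import org.hibernate.search.store.DirectoryProviderHelper;

public class LockingConfigSketch {
    public static LockFactory configure() {
        Properties indexProps = new Properties();
        // one of: simple, native, single, none, or the FQCN of a LockFactoryFactory
        indexProps.setProperty( "locking_strategy", "native" );
        // chunk size in MB used by FileHelper.synchronize during index copies
        indexProps.setProperty( "buffer_size_on_copy", "16" );
        // copy period in seconds for the master/slave providers (default 3600)
        indexProps.setProperty( "refresh", "1800" );
        return DirectoryProviderHelper.createLockFactory( new File( "/var/lucene/indexes/Book" ), indexProps );
    }
}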
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSDirectoryProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSDirectoryProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSDirectoryProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,109 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.store.FSDirectory;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Use a Lucene {@link FSDirectory}. The base directory is represented by the property <i>hibernate.search.default.indexBase</i>
+ * or <i>hibernate.search.<index>.indexBase</i>. The former defines the default base directory for all indexes, whereas the
+ * latter overrides the base directory on a per-index basis. <i><index></i> has to be replaced with the fully qualified
+ * classname of the indexed class or the value of the <i>index</i> property of the <code>@Indexed</code> annotation.
+ * <p>
+ * The actual index files are then created in <i><indexBase>/<index name></i>. <i><index name></i> is
+ * by default the name of the indexed entity or the value of the <i>index</i> property of <code>@Indexed</code>, and can also be specified
+ * as a property in the configuration file using <i>hibernate.search.<index>.indexName</i>.
+ * </p>
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ * @author Sanne Grinovero
+ */
+public class FSDirectoryProvider implements DirectoryProvider<FSDirectory> {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private FSDirectory directory;
+ private String indexName;
+
+ public void initialize(String directoryProviderName, Properties properties, SearchFactoryImplementor searchFactoryImplementor) {
+ // on "manual" indexing skip read-write check on index directory
+ boolean manual = searchFactoryImplementor.getIndexingStrategy().equals( "manual" );
+ File indexDir = DirectoryProviderHelper.getVerifiedIndexDir( directoryProviderName, properties, ! manual );
+ try {
+ indexName = indexDir.getCanonicalPath();
+ //this is cheap so it's not done in start()
+ directory = DirectoryProviderHelper.createFSIndex( indexDir, properties );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+ }
+
+ public void start() {
+ //all the process is done in initialize
+ }
+
+ public void stop() {
+ try {
+ directory.close();
+ }
+ catch (Exception e) {
+ log.error( "Unable to properly close Lucene directory {}" + directory.getFile(), e );
+ }
+ }
+
+ public FSDirectory getDirectory() {
+ return directory;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ if ( obj == this ) return true;
+ if ( obj == null || !( obj instanceof FSDirectoryProvider ) ) return false;
+ return indexName.equals( ( (FSDirectoryProvider) obj ).indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ int hash = 11;
+ return 37 * hash + indexName.hashCode();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSDirectoryProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
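As a rough illustration of how the provider above resolves its on-disk location, the following calls the public helper directly; the annotated index name and paths are made up.

import java.io.File;
import java.util.Properties;

import org.hibernate.search.store.DirectoryProviderHelper;

public class IndexDirSketch {
    public static File resolve() {
        Properties indexProps = new Properties();
        indexProps.setProperty( "indexBase", "/var/lucene/indexes" );
        // optional: defaults to the annotated index name ("com.acme.Book" here)
        indexProps.setProperty( "indexName", "books" );
        // resolves to /var/lucene/indexes/books, creating it if missing and checking write access
        return DirectoryProviderHelper.getVerifiedIndexDir( "com.acme.Book", indexProps, true );
    }
}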
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSMasterDirectoryProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSMasterDirectoryProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSMasterDirectoryProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,244 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+
+import org.apache.lucene.store.FSDirectory;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * File based DirectoryProvider that takes care of copying the index to a source directory.
+ * The base directory is represented by hibernate.search.<index>.indexBase.
+ * The index is created in <base directory>/<index name>.
+ * The source (aka copy) directory is built from <sourceBase>/<index name>.
+ *
+ * A copy is triggered every "refresh" seconds.
+ *
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+//TODO rename copy?
+public class FSMasterDirectoryProvider implements DirectoryProvider<FSDirectory> {
+
+ private static final String CURRENT1 = "current1";
+ private static final String CURRENT2 = "current2";
+ // defined to have CURRENT_DIR_NAME[1] == "current"+"1":
+ private static final String[] CURRENT_DIR_NAME = { null, CURRENT1, CURRENT2 };
+
+ private static final Logger log = LoggerFactory.make();
+ private final Timer timer = new Timer( true ); //daemon thread, the copy algorithm is robust
+
+ private volatile int current;
+
+ //variables having visibility granted by a read of "current"
+ private FSDirectory directory;
+ private String indexName;
+ private SearchFactoryImplementor searchFactory;
+ private long copyChunkSize;
+
+ //variables needed between initialize and start (used by same thread: no special care needed)
+ private File sourceDir;
+ private File indexDir;
+ private String directoryProviderName;
+ private Properties properties;
+
+ public void initialize(String directoryProviderName, Properties properties, SearchFactoryImplementor searchFactoryImplementor) {
+ this.properties = properties;
+ this.directoryProviderName = directoryProviderName;
+ //source guessing
+ sourceDir = DirectoryProviderHelper.getSourceDirectory( directoryProviderName, properties, true );
+ log.debug( "Source directory: {}", sourceDir.getPath() );
+ indexDir = DirectoryProviderHelper.getVerifiedIndexDir( directoryProviderName, properties, true );
+ log.debug( "Index directory: {}", indexDir.getPath() );
+ try {
+ indexName = indexDir.getCanonicalPath();
+ directory = DirectoryProviderHelper.createFSIndex( indexDir, properties );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+ copyChunkSize = DirectoryProviderHelper.getCopyBufferSize( directoryProviderName, properties );
+ this.searchFactory = searchFactoryImplementor;
+ current = 0; //write to volatile to publish all state
+ }
+
+ public void start() {
+ int currentLocal = 0;
+ try {
+ //copy to source
+ if ( new File( sourceDir, CURRENT1 ).exists() ) {
+ currentLocal = 2;
+ }
+ else if ( new File( sourceDir, CURRENT2 ).exists() ) {
+ currentLocal = 1;
+ }
+ else {
+ log.debug( "Source directory for '{}' will be initialized", indexName);
+ currentLocal = 1;
+ }
+ String currentString = Integer.valueOf( currentLocal ).toString();
+ File subDir = new File( sourceDir, currentString );
+ FileHelper.synchronize( indexDir, subDir, true, copyChunkSize );
+ new File( sourceDir, CURRENT1 ).delete();
+ new File( sourceDir, CURRENT2 ).delete();
+ //TODO small hole, no file can be found here
+ new File( sourceDir, CURRENT_DIR_NAME[currentLocal] ).createNewFile();
+ log.debug( "Current directory: {}", currentLocal );
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+ TimerTask task = new FSMasterDirectoryProvider.TriggerTask( indexDir, sourceDir, this );
+ long period = DirectoryProviderHelper.getRefreshPeriod( properties, directoryProviderName );
+ timer.scheduleAtFixedRate( task, period, period );
+ this.current = currentLocal; //write to volatile to publish all state
+ }
+
+ public FSDirectory getDirectory() {
+ @SuppressWarnings("unused")
+ int readCurrentState = current; //Unneeded value, needed to ensure visibility of state protected by memory barrier
+ return directory;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ if ( obj == this ) return true;
+ if ( obj == null || !( obj instanceof FSMasterDirectoryProvider ) ) return false;
+ FSMasterDirectoryProvider other = (FSMasterDirectoryProvider)obj;
+ //break both memory barriers by reading volatile variables:
+ @SuppressWarnings("unused")
+ int readCurrentState = other.current;
+ readCurrentState = this.current;
+ return indexName.equals( other.indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ @SuppressWarnings("unused")
+ int readCurrentState = current; //Unneeded value, to ensure visibility of state protected by memory barrier
+ int hash = 11;
+ return 37 * hash + indexName.hashCode();
+ }
+
+ public void stop() {
+ @SuppressWarnings("unused")
+ int readCurrentState = current; //Another unneeded value, to ensure visibility of state protected by memory barrier
+ timer.cancel();
+ try {
+ directory.close();
+ }
+ catch (Exception e) {
+ log.error( "Unable to properly close Lucene directory {}" + directory.getFile(), e );
+ }
+ }
+
+ private class TriggerTask extends TimerTask {
+
+ private final Executor executor;
+ private final FSMasterDirectoryProvider.CopyDirectory copyTask;
+
+ public TriggerTask(File source, File destination, DirectoryProvider<FSDirectory> directoryProvider) {
+ executor = Executors.newSingleThreadExecutor();
+ copyTask = new FSMasterDirectoryProvider.CopyDirectory( source, destination, directoryProvider );
+ }
+
+ public void run() {
+ if ( copyTask.inProgress.compareAndSet( false, true ) ) {
+ executor.execute( copyTask );
+ }
+ else {
+ log.info( "Skipping directory synchronization, previous work still in progress: {}", indexName );
+ }
+ }
+ }
+
+ private class CopyDirectory implements Runnable {
+ private final File source;
+ private final File destination;
+ private final AtomicBoolean inProgress = new AtomicBoolean( false );
+ private final Lock directoryProviderLock;
+
+ public CopyDirectory(File source, File destination, DirectoryProvider<FSDirectory> directoryProvider) {
+ this.source = source;
+ this.destination = destination;
+ this.directoryProviderLock = searchFactory.getDirectoryProviderLock( directoryProvider );
+ }
+
+ public void run() {
+ //TODO get rid of current and use the marker file instead?
+ directoryProviderLock.lock();
+ try {
+ long start = System.currentTimeMillis();//keep time after lock is acquired for correct measure
+ int oldIndex = current;
+ int index = oldIndex == 1 ? 2 : 1;
+ File destinationFile = new File( destination, Integer.valueOf(index).toString() );
+ try {
+ log.trace( "Copying {} into {}", source, destinationFile );
+ FileHelper.synchronize( source, destinationFile, true, copyChunkSize );
+ current = index;
+ }
+ catch (IOException e) {
+ //don't change current
+ log.error( "Unable to synchronize source of " + indexName, e );
+ return;
+ }
+ if ( ! new File( destination, CURRENT_DIR_NAME[oldIndex] ).delete() ) {
+ log.warn( "Unable to remove previous marker file from source of {}", indexName );
+ }
+ try {
+ new File( destination, CURRENT_DIR_NAME[index] ).createNewFile();
+ }
+ catch( IOException e ) {
+ log.warn( "Unable to create current marker in source of " + indexName, e );
+ }
+ log.trace( "Copy for {} took {} ms", indexName, (System.currentTimeMillis() - start) );
+ }
+ finally {
+ directoryProviderLock.unlock();
+ inProgress.set( false );
+ }
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSMasterDirectoryProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
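A hypothetical master-side configuration for the provider above: the live index sits under indexBase and is pushed to <sourceBase>/<index name> every refresh seconds, alternating between the current1/current2 marker directories. All values are illustrative.

import java.util.Properties;

public class MasterConfigSketch {
    public static Properties example() {
        Properties p = new Properties();
        p.setProperty( "hibernate.search.default.directory_provider",
                "org.hibernate.search.store.FSMasterDirectoryProvider" );
        p.setProperty( "hibernate.search.default.indexBase", "/var/lucene/master" );   // local, writable index
        p.setProperty( "hibernate.search.default.sourceBase", "/mnt/shared/lucene" );  // shared copy target
        p.setProperty( "hibernate.search.default.refresh", "600" );                    // copy every 10 minutes
        return p;
    }
}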
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,317 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.lucene.store.FSDirectory;
+import org.slf4j.Logger;
+
+import org.hibernate.AssertionFailure;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * File based directory provider that takes care of getting a version of the index
+ * from a given source.
+ * The base directory is represented by hibernate.search.<index>.indexBase.
+ * The index is created in <base directory>/<index name>.
+ * The source (aka copy) directory is built from <sourceBase>/<index name>.
+ * <p/>
+ * A copy is triggered every "refresh" seconds.
+ *
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class FSSlaveDirectoryProvider implements DirectoryProvider<FSDirectory> {
+
+ private static final Logger log = LoggerFactory.make();
+ private final Timer timer = new Timer( true ); //daemon thread, the copy algorithm is robust
+
+ private volatile int current; //used also as memory barrier of all other values, which are set once.
+
+ //variables having visibility granted by a read of "current"
+ private FSDirectory directory1;
+ private FSDirectory directory2;
+ private String indexName;
+ private long copyChunkSize;
+
+ //variables needed between initialize and start (used by same thread: no special care needed)
+ private File sourceIndexDir;
+ private File indexDir;
+ private String directoryProviderName;
+ private Properties properties;
+
+ public void initialize(String directoryProviderName, Properties properties, SearchFactoryImplementor searchFactoryImplementor) {
+ this.properties = properties;
+ this.directoryProviderName = directoryProviderName;
+ //source guessing
+ sourceIndexDir = DirectoryProviderHelper.getSourceDirectory( directoryProviderName, properties, false );
+ if ( !new File( sourceIndexDir, "current1" ).exists() && !new File( sourceIndexDir, "current2" ).exists() ) {
+ throw new IllegalStateException( "No current marker in source directory" );
+ }
+ log.debug( "Source directory: {}", sourceIndexDir.getPath() );
+ indexDir = DirectoryProviderHelper.getVerifiedIndexDir( directoryProviderName, properties, true );
+ log.debug( "Index directory: {}", indexDir.getPath() );
+ try {
+ indexName = indexDir.getCanonicalPath();
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+ copyChunkSize = DirectoryProviderHelper.getCopyBufferSize( directoryProviderName, properties );
+ current = 0; //publish all state to other threads
+ }
+
+ public void start() {
+ int readCurrentState = current; //Unneeded value, but ensure visibility of state protected by memory barrier
+ int currentToBe = 0;
+ try {
+ directory1 = DirectoryProviderHelper.createFSIndex( new File( indexDir, "1" ), properties );
+ directory2 = DirectoryProviderHelper.createFSIndex( new File( indexDir, "2" ), properties );
+ File currentMarker = new File( indexDir, "current1" );
+ File current2Marker = new File( indexDir, "current2" );
+ if ( currentMarker.exists() ) {
+ currentToBe = 1;
+ if ( current2Marker.exists() ) {
+ current2Marker.delete(); //TODO or throw an exception?
+ }
+ }
+ else if ( current2Marker.exists() ) {
+ currentToBe = 2;
+ }
+ else {
+ //no default
+ log.debug( "Setting directory 1 as current" );
+ currentToBe = 1;
+ File destinationFile = new File( indexDir, Integer.valueOf( currentToBe ).toString() );
+ int sourceCurrent;
+ if ( new File( sourceIndexDir, "current1" ).exists() ) {
+ sourceCurrent = 1;
+ }
+ else if ( new File( sourceIndexDir, "current2" ).exists() ) {
+ sourceCurrent = 2;
+ }
+ else {
+ throw new AssertionFailure( "No current file marker found in source directory: " + sourceIndexDir.getPath() );
+ }
+ try {
+ FileHelper.synchronize(
+ new File( sourceIndexDir, String.valueOf( sourceCurrent ) ),
+ destinationFile, true, copyChunkSize
+ );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to synchronize directory: " + indexName, e );
+ }
+ if ( !currentMarker.createNewFile() ) {
+ throw new SearchException( "Unable to create the directory marker file: " + indexName );
+ }
+ }
+ log.debug( "Current directory: {}", currentToBe );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to initialize index: " + directoryProviderName, e );
+ }
+ TimerTask task = new TriggerTask( sourceIndexDir, indexDir );
+ long period = DirectoryProviderHelper.getRefreshPeriod( properties, directoryProviderName );
+ timer.scheduleAtFixedRate( task, period, period );
+ this.current = currentToBe;
+ }
+
+ public FSDirectory getDirectory() {
+ int readState = current;// to have the read consistent in the next two "if"s.
+ if ( readState == 1 ) {
+ return directory1;
+ }
+ else if ( readState == 2 ) {
+ return directory2;
+ }
+ else {
+ throw new AssertionFailure( "Illegal current directory: " + readState );
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ if ( obj == this ) {
+ return true;
+ }
+ if ( obj == null || !( obj instanceof FSSlaveDirectoryProvider ) ) {
+ return false;
+ }
+ FSSlaveDirectoryProvider other = ( FSSlaveDirectoryProvider ) obj;
+ //need to break memory barriers on both instances:
+ @SuppressWarnings("unused")
+ int readCurrentState = this.current; //unneeded value, but ensure visibility of indexName
+ readCurrentState = other.current; //another unneeded value, but ensure visibility of indexName
+ return indexName.equals( other.indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ @SuppressWarnings("unused")
+ int readCurrentState = current; //unneeded value, but ensure visibility of indexName
+ int hash = 11;
+ return 37 * hash + indexName.hashCode();
+ }
+
+ class TriggerTask extends TimerTask {
+
+ private final Executor executor;
+ private final CopyDirectory copyTask;
+
+ public TriggerTask(File sourceIndexDir, File destination) {
+ executor = Executors.newSingleThreadExecutor();
+ copyTask = new CopyDirectory( sourceIndexDir, destination );
+ }
+
+ public void run() {
+ if ( copyTask.inProgress.compareAndSet( false, true ) ) {
+ executor.execute( copyTask );
+ }
+ else {
+ if ( log.isTraceEnabled() ) {
+ @SuppressWarnings("unused")
+ int unneeded = current;//ensure visibility of indexName in Timer threads.
+ log.trace( "Skipping directory synchronization, previous work still in progress: {}", indexName );
+ }
+ }
+ }
+ }
+
+ class CopyDirectory implements Runnable {
+ private final File source;
+ private final File destination;
+ private final AtomicBoolean inProgress = new AtomicBoolean( false );
+
+ public CopyDirectory(File sourceIndexDir, File destination) {
+ this.source = sourceIndexDir;
+ this.destination = destination;
+ }
+
+ public void run() {
+ long start = System.currentTimeMillis();
+ try {
+ File sourceFile = determineCurrentSourceFile();
+ if ( sourceFile == null ) {
+ log.error( "Unable to determine current in source directory" );
+ return;
+ }
+
+ // check whether a copy is needed at all
+ File currentDestinationFile = new File( destination, Integer.valueOf( current ).toString() );
+ try {
+ if ( FileHelper.areInSync( sourceFile, currentDestinationFile ) ) {
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Source and destination directory are in sync. No copying required." );
+ }
+ return;
+ }
+ }
+ catch ( IOException ioe ) {
+ log.warn( "Unable to compare {} with {}.", sourceFile.getName(), currentDestinationFile.getName() );
+ }
+
+ // copy is required
+ int oldIndex = current;
+ int index = oldIndex == 1 ? 2 : 1;
+ File destinationFile = new File( destination, Integer.valueOf( index ).toString() );
+ try {
+ log.trace( "Copying {} into {}", sourceFile, destinationFile );
+ FileHelper.synchronize( sourceFile, destinationFile, true, copyChunkSize );
+ current = index;
+ log.trace( "Copy for {} took {} ms", indexName, ( System.currentTimeMillis() - start ) );
+ }
+ catch ( IOException e ) {
+ //don't change current
+ log.error( "Unable to synchronize " + indexName, e );
+ return;
+ }
+ if ( !new File( indexName, "current" + oldIndex ).delete() ) {
+ log.warn( "Unable to remove previous marker file in " + indexName );
+ }
+ try {
+ new File( indexName, "current" + index ).createNewFile();
+ }
+ catch ( IOException e ) {
+ log.warn( "Unable to create current marker file in " + indexName, e );
+ }
+ }
+ finally {
+ inProgress.set( false );
+ }
+ }
+
+ /**
+ * @return A file pointing to the currently active source directory. Tests for the files "current1" and
+ * "current2" in order to determine which is the current directory. If no marker file exists,
+ * <code>null</code> is returned.
+ */
+ private File determineCurrentSourceFile() {
+ File sourceFile = null;
+ if ( new File( source, "current1" ).exists() ) {
+ sourceFile = new File( source, "1" );
+ }
+ else if ( new File( source, "current2" ).exists() ) {
+ sourceFile = new File( source, "2" );
+ }
+ return sourceFile;
+ }
+ }
+
+ public void stop() {
+ @SuppressWarnings("unused")
+ int readCurrentState = current; //unneeded value, but ensure visibility of state protected by memory barrier
+ timer.cancel();
+ try {
+ directory1.close();
+ }
+ catch ( Exception e ) {
+ log.error( "Unable to properly close Lucene directory {}" + directory1.getFile(), e );
+ }
+ try {
+ directory2.close();
+ }
+ catch ( Exception e ) {
+ log.error( "Unable to properly close Lucene directory {}" + directory2.getFile(), e );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
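The matching slave-side sketch, again with hypothetical values: the slave reads whichever of current1/current2 is marked under sourceBase and copies it into its local indexBase on the same refresh period.

import java.util.Properties;

public class SlaveConfigSketch {
    public static Properties example() {
        Properties p = new Properties();
        p.setProperty( "hibernate.search.default.directory_provider",
                "org.hibernate.search.store.FSSlaveDirectoryProvider" );
        p.setProperty( "hibernate.search.default.indexBase", "/var/lucene/slave" );    // local copy used for queries
        p.setProperty( "hibernate.search.default.sourceBase", "/mnt/shared/lucene" );  // shared source written by the master
        p.setProperty( "hibernate.search.default.refresh", "600" );
        return p;
    }
}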
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IdHashShardingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IdHashShardingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IdHashShardingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.util.Properties;
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+
+import org.hibernate.search.filter.FullTextFilterImplementor;
+
+/**
+ * This implementation uses idInString as the hash key.
+ *
+ * @author Emmanuel Bernard
+ */
+public class IdHashShardingStrategy implements IndexShardingStrategy {
+
+ private DirectoryProvider<?>[] providers;
+ public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+ this.providers = providers;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+ return providers;
+ }
+
+ public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
+ return providers[ hashKey(idInString) ];
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
+ if ( idInString == null ) return providers;
+ return new DirectoryProvider[] { providers[hashKey( idInString )] };
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] fullTextFilters) {
+ return getDirectoryProvidersForAllShards();
+ }
+
+ private int hashKey(String key) {
+ // reproduce the hashCode implementation of String as documented in the javadoc
+ // to be safe across Java versions (in case it changes some day)
+ int hash = 0;
+ int length = key.length();
+ for ( int index = 0; index < length; index++ ) {
+ hash = 31 * hash + key.charAt( index );
+ }
+ return Math.abs( hash % providers.length );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IdHashShardingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
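A tiny standalone demonstration of the shard selection performed by hashKey() above: it mirrors String.hashCode() and takes the absolute value modulo the number of providers. The id and shard count are arbitrary.

public class ShardSelectionDemo {

    // same arithmetic as IdHashShardingStrategy.hashKey()
    static int hashKey(String idInString, int nbrOfShards) {
        int hash = 0;
        for ( int index = 0; index < idInString.length(); index++ ) {
            hash = 31 * hash + idInString.charAt( index );
        }
        return Math.abs( hash % nbrOfShards );
    }

    public static void main(String[] args) {
        // an entity whose id renders as "42", indexed over 2 shards
        System.out.println( "document stored in shard " + hashKey( "42", 2 ) );
    }
}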
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IndexShardingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IndexShardingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IndexShardingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.Serializable;
+import java.util.Properties;
+
+import org.apache.lucene.document.Document;
+
+import org.hibernate.search.filter.FullTextFilterImplementor;
+
+/**
+ * Defines how a given virtual index shards data into different DirectoryProviders
+ *
+ * @author Emmanuel Bernard
+ */
+public interface IndexShardingStrategy {
+ /**
+ * Provides access to the sharding properties (masked under the sharding_strategy suffix)
+ * and to all the DirectoryProviders for a given index.
+ */
+ void initialize(Properties properties, DirectoryProvider<?>[] providers);
+
+ /**
+ * Ask for all shards (e.g. to query or optimize).
+ */
+ DirectoryProvider<?>[] getDirectoryProvidersForAllShards();
+
+ /**
+ * return the DirectoryProvider where the given entity will be indexed
+ */
+ DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document);
+ /**
+ * Return the DirectoryProvider(s) where the given entity is stored and where the deletion operation needs to be applied.
+ * id and idInString can be null. If null, all the directory providers containing the entity type should be returned.
+ */
+ DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString);
+
+ /**
+ * Return the set of DirectoryProvider(s) where the entities matching the filters are stored.
+ * This optional optimization allows queries to hit a subset of all shards, which may be useful for some datasets.
+ * If this optimization is not needed, return getDirectoryProvidersForAllShards().
+ *
+ * fullTextFilters can be empty if no filter is applied.
+ */
+ DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] fullTextFilters);
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/IndexShardingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
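For a custom strategy, the wiring would look roughly like this: the FQCN configured under <index>.sharding_strategy is instantiated through PluginLoader and then initialized with the properties masked under that same prefix. The implementation class named below is purely hypothetical.

import java.util.Properties;

public class CustomShardingWiringSketch {
    public static Properties example() {
        Properties p = new Properties();
        // hypothetical class implementing IndexShardingStrategy
        p.setProperty( "hibernate.search.com.acme.Book.sharding_strategy",
                "com.acme.search.AuthorShardingStrategy" );
        p.setProperty( "hibernate.search.com.acme.Book.sharding_strategy.nbr_of_shards", "4" );
        return p;
    }
}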
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/LockFactoryFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/LockFactoryFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/LockFactoryFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.File;
+import java.util.Properties;
+
+import org.apache.lucene.store.LockFactory;
+
+/**
+ * To use a custom implementation of org.apache.lucene.store.LockFactory
+ * you need to implement this interface and define the fully qualified
+ * classname of the factory implementation as a DirectoryProvider parameter
+ * for the locking_strategy key.
+ * The implementation must have a no-arg constructor.
+ *
+ * @author Sanne Grinovero
+ */
+public interface LockFactoryFactory {
+
+ /**
+ * Creates a LockFactory implementation.
+ * A different LockFactory is created for each DirectoryProvider.
+ * @param indexDir path to the indexBase setting, or null for
+ * DirectoryProviders which don't rely on filesystem
+ * @param dirConfiguration the properties set on the current DirectoryProvider
+ * @return the created LockFactory
+ */
+ LockFactory createLockFactory(File indexDir, Properties dirConfiguration);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/LockFactoryFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
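A minimal sketch of a custom factory (class and behaviour invented for illustration); it would be selected by setting the index's locking_strategy property to its fully qualified class name.

import java.io.File;
import java.util.Properties;

import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.NoLockFactory;

import org.hibernate.search.store.LockFactoryFactory;

public class NoOpLockFactoryFactory implements LockFactoryFactory {

    // a no-arg constructor is required, as stated in the interface javadoc

    public LockFactory createLockFactory(File indexDir, Properties dirConfiguration) {
        // ignore the directory and configuration and never lock: illustration only,
        // mirroring the "none" option handled in DirectoryProviderHelper.createLockFactory
        return new NoLockFactory();
    }
}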
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/NotShardedStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/NotShardedStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/NotShardedStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,62 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.util.Properties;
+import java.io.Serializable;
+
+import org.apache.lucene.document.Document;
+import org.hibernate.annotations.common.AssertionFailure;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class NotShardedStrategy implements IndexShardingStrategy {
+ private DirectoryProvider<?>[] directoryProvider;
+ public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+ this.directoryProvider = providers;
+ if ( directoryProvider.length > 1) {
+ throw new AssertionFailure("Using NotShardedStrategy with multiple DirectoryProviders");
+ }
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+ return directoryProvider;
+ }
+
+ public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
+ return directoryProvider[0];
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
+ return directoryProvider;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] fullTextFilters) {
+ return directoryProvider;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/NotShardedStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/RAMDirectoryProvider.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/RAMDirectoryProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/RAMDirectoryProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.RAMDirectory;
+import org.hibernate.HibernateException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * Use a Lucene RAMDirectory
+ *
+ * @author Emmanuel Bernard
+ * @author Sylvain Vieujot
+ */
+public class RAMDirectoryProvider implements DirectoryProvider<RAMDirectory> {
+
+ private final RAMDirectory directory = new RAMDirectory();
+ private String indexName;
+
+ public void initialize(String directoryProviderName, Properties properties, SearchFactoryImplementor searchFactoryImplementor) {
+ indexName = directoryProviderName;
+ directory.setLockFactory( DirectoryProviderHelper.createLockFactory( null, properties ) );
+ }
+
+ public void start() {
+ try {
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( directory, new SimpleAnalyzer(), true, fieldLength );
+ iw.close();
+ }
+ catch (IOException e) {
+ throw new HibernateException( "Unable to initialize index: " + indexName, e );
+ }
+ }
+
+
+ public RAMDirectory getDirectory() {
+ return directory;
+ }
+
+ public void stop() {
+ directory.close();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ if ( obj == this ) return true;
+ if ( obj == null || !( obj instanceof RAMDirectoryProvider ) ) return false;
+ return indexName.equals( ( (RAMDirectoryProvider) obj ).indexName );
+ }
+
+ @Override
+ public int hashCode() {
+ // this code is actually broken since the value changes after the initialize call,
+ // but from a practical POV this is fine since we only call this method
+ // after the initialize call
+ int hash = 7;
+ return 29 * hash + indexName.hashCode();
+ }
+
+}
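Illustrative sketch (not part of this commit): a DirectoryProvider is normally selected through configuration properties; the key below follows the usual hibernate.search.<index>.* convention and the "default" index name is an assumption.

import java.util.Properties;

public class RamIndexConfigExample {

    // Builds properties selecting the in-memory provider for every index.
    public static Properties ramIndexProperties() {
        Properties props = new Properties();
        props.setProperty( "hibernate.search.default.directory_provider",
                "org.hibernate.search.store.RAMDirectoryProvider" );
        return props;
    }
}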
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/RAMDirectoryProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/IncrementalOptimizerStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/IncrementalOptimizerStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/IncrementalOptimizerStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store.optimization;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.lucene.index.IndexWriter;
+import org.slf4j.Logger;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Optimization strategy triggered after a certain amount of operations
+ *
+ * @author Emmanuel Bernard
+ */
+public class IncrementalOptimizerStrategy implements OptimizerStrategy {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private int operationMax = -1;
+ private int transactionMax = -1;
+ private long operations = 0;
+ private long transactions = 0;
+ private DirectoryProvider directoryProvider;
+
+ public void initialize(DirectoryProvider directoryProvider, Properties indexProperties, SearchFactoryImplementor searchFactoryImplementor) {
+ this.directoryProvider = directoryProvider;
+ operationMax = ConfigurationParseHelper.getIntValue( indexProperties, "optimizer.operation_limit.max", -1 );
+ transactionMax = ConfigurationParseHelper.getIntValue( indexProperties, "optimizer.transaction_limit.max", -1 );
+ }
+
+ public void optimizationForced() {
+ operations = 0;
+ transactions = 0;
+ }
+
+ public boolean needOptimization() {
+ return (operationMax != -1 && operations >= operationMax)
+ || (transactionMax != -1 && transactions >= transactionMax);
+ }
+
+ public void addTransaction(long operations) {
+ this.operations += operations;
+ this.transactions++;
+ }
+
+ public void optimize(Workspace workspace) {
+ if ( needOptimization() ) {
+ log.debug( "Optimize {} after {} operations and {} transactions",
+ new Object[] { directoryProvider.getDirectory(), operations, transactions });
+ IndexWriter writer = workspace.getIndexWriter( false ); //TODO true or false?
+ try {
+ writer.optimize();
+ }
+ catch (IOException e) {
+ throw new SearchException( "Unable to optimize directoryProvider: "
+ + directoryProvider.getDirectory().toString(), e );
+ }
+ optimizationForced();
+ }
+ }
+}
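Illustrative sketch (not part of this commit): the two limits read in initialize() map to index configuration properties; the hibernate.search.default prefix and the numeric values below are assumptions based on the usual per-index key convention.

import java.util.Properties;

public class IncrementalOptimizerConfigExample {

    // Requests an optimization once 1000 operations or 100 transactions have accumulated.
    public static Properties optimizerProperties() {
        Properties props = new Properties();
        props.setProperty( "hibernate.search.default.optimizer.operation_limit.max", "1000" );
        props.setProperty( "hibernate.search.default.optimizer.transaction_limit.max", "100" );
        return props;
    }
}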
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/IncrementalOptimizerStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/NoOpOptimizerStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/NoOpOptimizerStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/NoOpOptimizerStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store.optimization;
+
+import java.util.Properties;
+
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.backend.Workspace;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class NoOpOptimizerStrategy implements OptimizerStrategy {
+ public void initialize(DirectoryProvider directoryProvider, Properties indexProperties, SearchFactoryImplementor searchFactoryImplementor) {
+ }
+
+ public void optimizationForced() {
+ }
+
+ public boolean needOptimization() {
+ return false;
+ }
+
+ public void addTransaction(long operations) {
+ }
+
+ public void optimize(Workspace workspace) {
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/NoOpOptimizerStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/OptimizerStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/OptimizerStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/OptimizerStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,59 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.store.optimization;
+
+import java.util.Properties;
+
+import org.hibernate.search.backend.Workspace;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface OptimizerStrategy {
+ public void initialize(DirectoryProvider directoryProvider, Properties indexProperties, SearchFactoryImplementor searchFactoryImplementor);
+
+ /**
+ * has to be called in a thread safe way
+ */
+ void optimizationForced();
+
+ /**
+ * has to be called in a thread safe way
+ */
+ boolean needOptimization();
+
+ /**
+ * has to be called in a thread safe way
+ */
+ public void addTransaction(long operations);
+
+ /**
+ * has to be called in a thread safe way
+ */
+ void optimize(Workspace workspace);
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/store/optimization/OptimizerStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ContextHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ContextHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ContextHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import org.hibernate.HibernateException;
+import org.hibernate.Session;
+import org.hibernate.engine.SessionImplementor;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+
+/**
+ * @author Emmanuel Bernard
+ * @deprecated Use {@link org.hibernate.search.FullTextSession#getSearchFactory()} instead.
+ */
+public abstract class ContextHelper {
+
+ public static SearchFactoryImplementor getSearchFactory(Session session) {
+ return getSearchFactoryBySFI( (SessionImplementor) session );
+ }
+
+
+ public static SearchFactoryImplementor getSearchFactoryBySFI(SessionImplementor session) {
+ PostInsertEventListener[] listeners = session.getListeners().getPostInsertEventListeners();
+ FullTextIndexEventListener listener = null;
+ //FIXME this sucks since we mandate the event listener use
+ for ( PostInsertEventListener candidate : listeners ) {
+ if ( candidate instanceof FullTextIndexEventListener ) {
+ listener = (FullTextIndexEventListener) candidate;
+ break;
+ }
+ }
+ if ( listener == null ) throw new HibernateException(
+ "Hibernate Search Event listeners not configured, please check the reference documentation and the " +
+ "application's hibernate.cfg.xml" );
+ return listener.getSearchFactoryImplementor();
+ }
+}
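Illustrative sketch (not part of this commit): the replacement suggested by the @deprecated note above looks like this.

import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.SearchFactory;

public class SearchFactoryLookupExample {

    // Preferred replacement for the deprecated ContextHelper lookup.
    public static SearchFactory lookup(Session session) {
        FullTextSession fullTextSession = Search.getFullTextSession( session );
        return fullTextSession.getSearchFactory();
    }
}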
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ContextHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/DelegateNamedAnalyzer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/DelegateNamedAnalyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/DelegateNamedAnalyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.io.IOException;
+import java.io.Reader;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+
+/**
+ * Delegates to a named analyzer.
+ * Delegated analyzers are lazily configured.
+ *
+ * @author Emmanuel Bernard
+ */
+public class DelegateNamedAnalyzer extends Analyzer {
+ private String name;
+ private Analyzer delegate;
+
+ public DelegateNamedAnalyzer(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setDelegate(Analyzer delegate) {
+ this.delegate = delegate;
+ this.name = null; //unique init
+ }
+
+ public TokenStream tokenStream(String fieldName, Reader reader) {
+ return delegate.tokenStream( fieldName, reader );
+ }
+
+ @Override
+ public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException {
+ return delegate.reusableTokenStream( fieldName, reader );
+ }
+
+ @Override
+ public int getPositionIncrementGap(String fieldName) {
+ return delegate.getPositionIncrementGap( fieldName );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/DelegateNamedAnalyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FileHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FileHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FileHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,202 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.slf4j.Logger;
+
+/**
+ * Utility class for synchronizing files/directories.
+ *
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ */
+public abstract class FileHelper {
+
+ private static final Logger log = LoggerFactory.make();
+ private static final int FAT_PRECISION = 2000;
+ public static final long DEFAULT_COPY_BUFFER_SIZE = 16 * 1024 * 1024; // 16 MB
+
+
+ public static boolean areInSync(File source, File destination) throws IOException {
+ if ( source.isDirectory() ) {
+ if ( !destination.exists() ) {
+ return false;
+ }
+ else if ( !destination.isDirectory() ) {
+ throw new IOException(
+ "Source and Destination not of the same type:"
+ + source.getCanonicalPath() + " , " + destination.getCanonicalPath()
+ );
+ }
+ String[] sources = source.list();
+ Set<String> srcNames = new HashSet<String>( Arrays.asList( sources ) );
+ String[] dests = destination.list();
+
+ // check for files in destination and not in source
+ for ( String fileName : dests ) {
+ if ( !srcNames.contains( fileName ) ) {
+ return false;
+ }
+ }
+
+ boolean inSync = true;
+ for ( String fileName : sources ) {
+ File srcFile = new File( source, fileName );
+ File destFile = new File( destination, fileName );
+ if ( !areInSync( srcFile, destFile ) ) {
+ inSync = false;
+ break;
+ }
+ }
+ return inSync;
+ }
+ else {
+ if ( destination.exists() && destination.isFile() ) {
+ long sts = source.lastModified() / FAT_PRECISION;
+ long dts = destination.lastModified() / FAT_PRECISION;
+ return sts == dts;
+ }
+ else {
+ return false;
+ }
+ }
+ }
+
+ public static void synchronize(File source, File destination, boolean smart) throws IOException {
+ synchronize( source, destination, smart, DEFAULT_COPY_BUFFER_SIZE );
+ }
+
+ public static void synchronize(File source, File destination, boolean smart, long chunkSize) throws IOException {
+ if ( chunkSize <= 0 ) {
+ log.warn( "Chunk size must be positive: using default value." );
+ chunkSize = DEFAULT_COPY_BUFFER_SIZE;
+ }
+ if ( source.isDirectory() ) {
+ if ( !destination.exists() ) {
+ if ( !destination.mkdirs() ) {
+ throw new IOException( "Could not create path " + destination );
+ }
+ }
+ else if ( !destination.isDirectory() ) {
+ throw new IOException(
+ "Source and Destination not of the same type:"
+ + source.getCanonicalPath() + " , " + destination.getCanonicalPath()
+ );
+ }
+ String[] sources = source.list();
+ Set<String> srcNames = new HashSet<String>( Arrays.asList( sources ) );
+ String[] dests = destination.list();
+
+ //delete files not present in source
+ for ( String fileName : dests ) {
+ if ( !srcNames.contains( fileName ) ) {
+ delete( new File( destination, fileName ) );
+ }
+ }
+ //copy each file from source
+ for ( String fileName : sources ) {
+ File srcFile = new File( source, fileName );
+ File destFile = new File( destination, fileName );
+ synchronize( srcFile, destFile, smart, chunkSize );
+ }
+ }
+ else {
+ if ( destination.exists() && destination.isDirectory() ) {
+ delete( destination );
+ }
+ if ( destination.exists() ) {
+ long sts = source.lastModified() / FAT_PRECISION;
+ long dts = destination.lastModified() / FAT_PRECISION;
+ //do not copy if smart and same timestamp and same length
+ if ( !smart || sts == 0 || sts != dts || source.length() != destination.length() ) {
+ copyFile( source, destination, chunkSize );
+ }
+ }
+ else {
+ copyFile( source, destination, chunkSize );
+ }
+ }
+ }
+
+ private static void copyFile(File srcFile, File destFile, long chunkSize) throws IOException {
+ FileInputStream is = null;
+ FileOutputStream os = null;
+ try {
+ is = new FileInputStream( srcFile );
+ FileChannel iChannel = is.getChannel();
+ os = new FileOutputStream( destFile, false );
+ FileChannel oChannel = os.getChannel();
+ long doneBytes = 0L;
+ long todoBytes = srcFile.length();
+ while ( todoBytes != 0L ) {
+ long iterationBytes = Math.min( todoBytes, chunkSize );
+ long transferredLength = oChannel.transferFrom( iChannel, doneBytes, iterationBytes );
+ if ( iterationBytes != transferredLength ) {
+ throw new IOException(
+ "Error during file transfer: expected "
+ + iterationBytes + " bytes, only " + transferredLength + " bytes copied."
+ );
+ }
+ doneBytes += transferredLength;
+ todoBytes -= transferredLength;
+ }
+ }
+ finally {
+ if ( is != null ) {
+ is.close();
+ }
+ if ( os != null ) {
+ os.close();
+ }
+ }
+ boolean successTimestampOp = destFile.setLastModified( srcFile.lastModified() );
+ if ( !successTimestampOp ) {
+ log.warn( "Could not change timestamp for {}. Index synchronization may be slow.", destFile );
+ }
+ }
+
+ public static void delete(File file) {
+ if ( file.isDirectory() ) {
+ for ( File subFile : file.listFiles() ) {
+ delete( subFile );
+ }
+ }
+ if ( file.exists() ) {
+ if ( !file.delete() ) {
+ log.error( "Could not delete {}", file );
+ }
+ }
+ }
+}
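Illustrative sketch (not part of this commit): typical use of the synchronize() helper; the paths are placeholders.

import java.io.File;
import java.io.IOException;

import org.hibernate.search.util.FileHelper;

public class FileHelperUsageExample {

    public static void main(String[] args) throws IOException {
        File master = new File( "/tmp/index-master" ); // placeholder paths
        File copy = new File( "/tmp/index-copy" );
        // "smart" mode skips files whose timestamp and length already match;
        // the default 16 MB copy buffer is used
        FileHelper.synchronize( master, copy, true );
    }
}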
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FileHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FilterCacheModeTypeHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FilterCacheModeTypeHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FilterCacheModeTypeHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import org.hibernate.search.annotations.FilterCacheModeType;
+import org.hibernate.annotations.common.AssertionFailure;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FilterCacheModeTypeHelper {
+ private FilterCacheModeTypeHelper() {}
+
+ public static boolean cacheInstance(FilterCacheModeType type) {
+ switch ( type ) {
+ case NONE:
+ return false;
+ case INSTANCE_AND_DOCIDSETRESULTS:
+ return true;
+ case INSTANCE_ONLY:
+ return true;
+ default:
+ throw new AssertionFailure("Unknwn FilterCacheModeType:" + type);
+ }
+ }
+
+ public static boolean cacheResults(FilterCacheModeType type) {
+ switch ( type ) {
+ case NONE:
+ return false;
+ case INSTANCE_AND_DOCIDSETRESULTS:
+ return true;
+ case INSTANCE_ONLY:
+ return false;
+ default:
+ throw new AssertionFailure("Unknwn FilterCacheModeType:" + type);
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/FilterCacheModeTypeHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/HibernateSearchResourceLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/HibernateSearchResourceLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/HibernateSearchResourceLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,99 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.io.InputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import org.apache.solr.common.ResourceLoader;
+import org.apache.solr.util.plugin.ResourceLoaderAware;
+
+import org.hibernate.util.ReflectHelper;
+import org.hibernate.search.SearchException;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class HibernateSearchResourceLoader implements ResourceLoader {
+ public InputStream openResource(String resource) throws IOException {
+ return Thread.currentThread().getContextClassLoader().getResourceAsStream( resource );
+ }
+
+ public List<String> getLines(String resource) throws IOException {
+ BufferedReader reader = null;
+ try {
+ reader = new BufferedReader( new InputStreamReader( openResource( resource ) ) );
+ List<String> results = new ArrayList<String>();
+ String line = reader.readLine();
+ while ( line != null ) {
+ //comment or empty line
+ if ( line.length() != 0 && !line.startsWith( "#" ) ) {
+ results.add( line );
+ }
+ line = reader.readLine();
+ }
+ return Collections.unmodifiableList( results );
+ }
+ finally {
+ try {
+ if (reader != null) reader.close();
+ }
+ catch ( Exception e ) {
+ //we don't really care if we can't close
+ }
+ }
+ }
+
+ public Object newInstance(String cname, String... subpackages) {
+ if (subpackages != null && subpackages.length > 0)
+ throw new UnsupportedOperationException( "newInstance(classname, packages) not implemented" );
+
+ final Class<?> clazz;
+ try {
+ clazz = ReflectHelper.classForName( cname );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new SearchException("Unable to find class " + cname, e);
+ }
+ try {
+ final Object instance = clazz.newInstance();
+ if (instance instanceof ResourceLoaderAware) {
+ ( ( ResourceLoaderAware) instance ).inform( this );
+ }
+ return instance;
+ }
+ catch ( InstantiationException e ) {
+ throw new SearchException("Unable to instanciate class with no-arg constructor: " + cname, e);
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException("Unable to instanciate class with no-arg constructor: " + cname, e);
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/HibernateSearchResourceLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/LoggerFactory.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/LoggerFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/LoggerFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,41 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import org.slf4j.Logger;
+
+/**
+ * A factory class for class loggers. Allows creation of loggers following the DRY principle.
+ *
+ * @author Hardy Ferentschik
+ */
+public class LoggerFactory {
+ public static Logger make() {
+ Throwable t = new Throwable();
+ StackTraceElement directCaller = t.getStackTrace()[1];
+ return org.slf4j.LoggerFactory.getLogger( directCaller.getClassName() );
+ }
+}
+
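Illustrative sketch (not part of this commit): the intended usage pattern, as already seen in FileHelper above; the component class name is a placeholder.

import org.slf4j.Logger;

import org.hibernate.search.util.LoggerFactory;

public class SomeComponent {

    // the logger is automatically named after SomeComponent
    private static final Logger log = LoggerFactory.make();

    public void doWork() {
        log.debug( "doing work" );
    }
}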
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/LoggerFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/PluginLoader.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/PluginLoader.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/PluginLoader.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,138 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.util.ReflectHelper;
+
+/**
+ * Utility class to load instances of other classes by using a fully qualified name,
+ * or from a class type.
+ * Uses reflection and throws SearchException(s) with proper descriptions of the error,
+ * such as the target class missing a proper constructor, being an interface, or not being found.
+ *
+ * @author Sanne Grinovero
+ */
+public class PluginLoader {
+
+ /**
+ * Creates an instance of a target class designed by fully qualified name
+ *
+ * @param <T> matches the type of targetSuperType: defines the return type
+ * @param targetSuperType the return type of the function, the classNameToLoad will be checked
+ * to be assignable to this type.
+ * @param classNameToLoad a fully qualified class name, whose type is assignable to targetSuperType
+ * @param caller the class of the caller, needed for classloading purposes
+ * @param componentDescription a meaningful description of the role the instance will have,
+ * used to enrich error messages to describe the context of the error
+ * @return a new instance of classNameToLoad
+ * @throws SearchException wrapping other error types with a proper error message for all kinds of problems, such as
+ * classNotFound, missing proper constructor, wrong type, security errors.
+ */
+ public static <T> T instanceFromName(Class<T> targetSuperType, String classNameToLoad,
+ Class<?> caller, String componentDescription) {
+ final Class<?> clazzDef;
+ try {
+ clazzDef = ReflectHelper.classForName( classNameToLoad, caller );
+ } catch (ClassNotFoundException e) {
+ throw new SearchException( "Unable to find " + componentDescription +
+ " implementation class: " + classNameToLoad, e );
+ }
+ return instanceFromClass( targetSuperType, clazzDef, componentDescription );
+ }
+
+ /**
+ * Creates an instance of target class
+ * @param <T> the type of targetSuperType: defines the return type
+ * @param targetSuperType the created instance will be checked to be assignable to this type
+ * @param classToLoad the class to be instantiated
+ * @param componentDescription a role name/description to contextualize error messages
+ * @return a new instance of classToLoad
+ * @throws SearchException wrapping other error types with a proper error message for all kinds of problems, such as
+ * missing proper constructor, wrong type, security errors.
+ */
+ @SuppressWarnings("unchecked")
+ public static <T> T instanceFromClass(Class<T> targetSuperType, Class<?> classToLoad, String componentDescription) {
+ checkClassType( classToLoad, componentDescription );
+ checkHasValidconstructor( classToLoad, componentDescription );
+ Object instance;
+ try {
+ instance = classToLoad.newInstance();
+ }
+ catch ( IllegalAccessException e ) {
+ throw new SearchException(
+ "Unable to instantiate " + componentDescription + " class: " + classToLoad.getName() +
+ ". Class or constructor is not accessible.", e );
+ }
+ catch ( InstantiationException e ) {
+ throw new SearchException(
+ "Unable to instantiate " + componentDescription + " class: " + classToLoad.getName() +
+ ". Verify it has a no-args public constructor and is not abstract.", e );
+ }
+ if ( ! targetSuperType.isInstance( instance ) ) {
+ // have a proper error message according to interface implementation or subclassing
+ if ( targetSuperType.isInterface() ) {
+ throw new SearchException(
+ "Wrong configuration of " + componentDescription + ": class " + classToLoad.getName()
+ + " does not implement interface " + targetSuperType.getName() );
+ }
+ else {
+ throw new SearchException(
+ "Wrong configuration of " + componentDescription + ": class " + classToLoad.getName()
+ + " is not a subtype of " + targetSuperType.getName() );
+ }
+ }
+ else {
+ return (T) instance;
+ }
+ }
+
+ private static void checkClassType(Class<?> classToLoad, String componentDescription) {
+ if ( classToLoad.isInterface() ) {
+ throw new SearchException( classToLoad.getName() + " defined for component " + componentDescription
+ + " is an interface: implementation required." );
+ }
+ }
+
+ /**
+ * Verifies that the target class has a no-args constructor and that it is
+ * accessible under the current security manager.
+ * @param classToLoad the class type to check
+ * @param componentDescription adds a meaningful description to the type to describe in the
+ * exception message
+ */
+ public static void checkHasValidconstructor(Class<?> classToLoad, String componentDescription) {
+ try {
+ classToLoad.getConstructor();
+ } catch (SecurityException e) {
+ throw new SearchException( classToLoad.getName() + " defined for component " + componentDescription
+ + " could not be instantiated because of a security manager error", e );
+ } catch (NoSuchMethodException e) {
+ throw new SearchException( classToLoad.getName() + " defined for component " + componentDescription
+ + " is missing a no-arguments constructor" );
+ }
+ }
+
+}
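Illustrative sketch (not part of this commit): loading a component by class name with instanceFromName(); the class name passed in would typically come from configuration and is a placeholder here.

import org.hibernate.search.store.optimization.OptimizerStrategy;
import org.hibernate.search.util.PluginLoader;

public class PluginLoaderUsageExample {

    public static OptimizerStrategy loadStrategy(String className) {
        return PluginLoader.instanceFromName(
                OptimizerStrategy.class,          // expected supertype
                className,                        // fully qualified implementation name
                PluginLoaderUsageExample.class,   // caller, used for classloading
                "optimizer strategy" );           // description used in error messages
    }
}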
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/PluginLoader.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ReflectionHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ReflectionHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ReflectionHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.lang.reflect.Modifier;
+
+import org.hibernate.annotations.common.reflection.XMember;
+import org.hibernate.annotations.common.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public abstract class ReflectionHelper {
+
+ private ReflectionHelper() {
+ }
+
+ /**
+ * Get attribute name out of member unless overridden by <code>name</code>.
+ *
+ * @param member <code>XMember</code> from which to extract the name.
+ * @param name Override value which will be returned in case it is not empty.
+ *
+ * @return attribute name out of member unless overridden by <code>name</code>.
+ */
+ public static String getAttributeName(XMember member, String name) {
+ return StringHelper.isNotEmpty( name ) ?
+ name :
+ member.getName(); //explicit field name
+ }
+
+ public static void setAccessible(XMember member) {
+ if ( !Modifier.isPublic( member.getModifiers() ) ) {
+ member.setAccessible( true );
+ }
+ }
+
+ public static Object getMemberValue(Object bean, XMember getter) {
+ Object value;
+ try {
+ value = getter.invoke( bean );
+ }
+ catch ( Exception e ) {
+ throw new IllegalStateException( "Could not get property value", e );
+ }
+ return value;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ReflectionHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ScopedAnalyzer.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ScopedAnalyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ScopedAnalyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+
+/**
+ * A <code>ScopedAnalyzer</code> is a wrapper class containing all analyzers for a given class.
+ * <code>ScopedAnalyzer</code> behaves similar to <code>PerFieldAnalyzerWrapper</code> by delegating requests for
+ * <code>TokenStream</code>s to the underlying <code>Analyzer</code> depending on the requested field name.
+ *
+ * @author Emmanuel Bernard
+ */
+public class ScopedAnalyzer extends Analyzer {
+ private Analyzer globalAnalyzer;
+ private Map<String, Analyzer> scopedAnalyzers = new HashMap<String, Analyzer>();
+
+ public ScopedAnalyzer() {
+ }
+
+ private ScopedAnalyzer( Analyzer globalAnalyzer, Map<String, Analyzer> scopedAnalyzers) {
+ this.globalAnalyzer = globalAnalyzer;
+ for ( Map.Entry<String, Analyzer> entry : scopedAnalyzers.entrySet() ) {
+ addScopedAnalyzer( entry.getKey(), entry.getValue() );
+ }
+ }
+
+ public void setGlobalAnalyzer( Analyzer globalAnalyzer ) {
+ this.globalAnalyzer = globalAnalyzer;
+ }
+
+ public void addScopedAnalyzer( String scope, Analyzer scopedAnalyzer ) {
+ scopedAnalyzers.put(scope, scopedAnalyzer);
+ }
+
+ public TokenStream tokenStream( String fieldName, Reader reader ) {
+ return getAnalyzer(fieldName).tokenStream(fieldName, reader);
+ }
+
+ public int getPositionIncrementGap( String fieldName ) {
+ return getAnalyzer(fieldName).getPositionIncrementGap(fieldName);
+ }
+
+ private Analyzer getAnalyzer( String fieldName ) {
+ Analyzer analyzer = scopedAnalyzers.get(fieldName);
+ if ( analyzer == null ) {
+ analyzer = globalAnalyzer;
+ }
+ return analyzer;
+ }
+
+ public ScopedAnalyzer clone() {
+ ScopedAnalyzer clone = new ScopedAnalyzer( globalAnalyzer, scopedAnalyzers );
+ return clone;
+ }
+}
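Illustrative sketch (not part of this commit): wiring a global analyzer plus a field-specific override. The concrete Lucene analyzers and the "title" field name are assumptions based on the Lucene version contemporary with this code.

import java.io.StringReader;

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;

import org.hibernate.search.util.ScopedAnalyzer;

public class ScopedAnalyzerUsageExample {

    public static void main(String[] args) {
        ScopedAnalyzer analyzer = new ScopedAnalyzer();
        analyzer.setGlobalAnalyzer( new SimpleAnalyzer() );               // default for all fields
        analyzer.addScopedAnalyzer( "title", new WhitespaceAnalyzer() );  // override for "title"
        // requests for the "title" field are routed to the WhitespaceAnalyzer
        TokenStream stream = analyzer.tokenStream( "title", new StringReader( "Hibernate Search" ) );
    }
}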
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/ScopedAnalyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/WeakIdentityHashMap.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/WeakIdentityHashMap.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/WeakIdentityHashMap.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,1042 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+
+/*
+ * JBoss, Home of Professional Open Source
+ * Copyright 2005, JBoss Inc., and individual contributors as indicated
+ * by the @authors tag. See the copyright.txt in the distribution for a
+ * full listing of individual contributors.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.hibernate.search.util;
+
+
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.AbstractCollection;
+import java.util.AbstractSet;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.ConcurrentModificationException;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+/**
+ * A hashtable-based <tt>Map</tt> implementation with <em>weak keys</em> and
+ * using reference-equality in place of object-equality when comparing keys
+ * (and values). In a <tt>WeakIdentityHashMap</tt>, two keys <tt>k1</tt> and
+ * <tt>k2</tt> are considered equal if and only if <tt>(k1==k2)</tt>.
+ * An entry in a <tt>WeakIdentityHashMap</tt> will automatically be removed when
+ * its key is no longer in ordinary use. More precisely, the presence of a
+ * mapping for a given key will not prevent the key from being discarded by the
+ * garbage collector, that is, made finalizable, finalized, and then reclaimed.
+ * When a key has been discarded its entry is effectively removed from the map.
+ * <p/>
+ * <p>Based on java.util.WeakHashMap</p>
+ * <p>Based on org.jboss.common.collection.WeakIdentityHashMap</p>
+ *
+ * @author Dawid Kurzyniec
+ * @author <a href="mailto:kabir.khan@jboss.org">Kabir Khan</a>
+ * @author Emmanuel Bernard
+
+ * @see java.util.IdentityHashMap
+ * @see java.util.WeakHashMap
+ */
+public class WeakIdentityHashMap<K,V> /*extends AbstractMap*/ implements Map<K,V> {
+
+ /**
+ * The default initial capacity -- MUST be a power of two.
+ */
+ private static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+ /**
+ * The maximum capacity, used if a higher value is implicitly specified
+ * by either of the constructors with arguments.
+ * MUST be a power of two <= 1<<30.
+ */
+ private static final int MAXIMUM_CAPACITY = 1 << 30;
+
+ /**
+ * The load factor used when none is specified in the constructor.
+ */
+ private static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+ /**
+ * The table, resized as necessary. Length MUST Always be a power of two.
+ */
+ private Entry<K,V>[] table;
+
+ /**
+ * The number of key-value mappings contained in this weak hash map.
+ */
+ private int size;
+
+ /**
+ * The next size value at which to resize (capacity * load factor).
+ */
+ private int threshold;
+
+ /**
+ * The load factor for the hash table.
+ */
+ private final float loadFactor;
+
+ /**
+ * Reference queue for cleared WeakEntries
+ */
+ private final ReferenceQueue queue = new ReferenceQueue();
+
+ /**
+ * The number of times this HashMap has been structurally modified.
+ * Structural modifications are those that change the number of mappings in
+ * the HashMap or otherwise modify its internal structure (e.g.,
+ * rehash). This field is used to make iterators on Collection-views of
+ * the HashMap fail-fast. (See ConcurrentModificationException).
+ */
+ private volatile int modCount;
+
+ /**
+ * Each of these fields are initialized to contain an instance of the
+ * appropriate view the first time this view is requested. The views are
+ * stateless, so there's no reason to create more than one of each.
+ */
+ transient volatile Set keySet = null;
+ transient volatile Collection values = null;
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the given
+ * initial capacity and the given load factor.
+ *
+ * @param initialCapacity The initial capacity of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @param loadFactor The load factor of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @throws IllegalArgumentException If the initial capacity is negative,
+ * or if the load factor is nonpositive.
+ */
+ public WeakIdentityHashMap(int initialCapacity, float loadFactor) {
+ if ( initialCapacity < 0 )
+ throw new IllegalArgumentException( "Illegal Initial Capacity: " +
+ initialCapacity );
+ if ( initialCapacity > MAXIMUM_CAPACITY )
+ initialCapacity = MAXIMUM_CAPACITY;
+
+ if ( loadFactor <= 0 || Float.isNaN( loadFactor ) )
+ throw new IllegalArgumentException( "Illegal Load factor: " +
+ loadFactor );
+ int capacity = 1;
+ while ( capacity < initialCapacity )
+ capacity <<= 1;
+ table = new Entry[capacity];
+ this.loadFactor = loadFactor;
+ threshold = (int) ( capacity * loadFactor );
+ }
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the given
+ * initial capacity and the default load factor, which is <tt>0.75</tt>.
+ *
+ * @param initialCapacity The initial capacity of the
+ * <tt>WeakIdentityHashMap</tt>
+ * @throws IllegalArgumentException If the initial capacity is negative.
+ */
+ public WeakIdentityHashMap(int initialCapacity) {
+ this( initialCapacity, DEFAULT_LOAD_FACTOR );
+ }
+
+ /**
+ * Constructs a new, empty <tt>WeakIdentityHashMap</tt> with the default
+ * initial capacity (16) and the default load factor (0.75).
+ */
+ public WeakIdentityHashMap() {
+ this.loadFactor = DEFAULT_LOAD_FACTOR;
+ threshold = DEFAULT_INITIAL_CAPACITY;
+ table = new Entry[DEFAULT_INITIAL_CAPACITY];
+ }
+
+ /**
+ * Constructs a new <tt>WeakIdentityHashMap</tt> with the same mappings as
+ * the specified <tt>Map</tt>. The <tt>WeakIdentityHashMap</tt> is created
+ * with default load factor, which is <tt>0.75</tt> and an initial capacity
+ * sufficient to hold the mappings in the specified <tt>Map</tt>.
+ *
+ * @param t the map whose mappings are to be placed in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public WeakIdentityHashMap(Map t) {
+ this( Math.max( (int) ( t.size() / DEFAULT_LOAD_FACTOR ) + 1, 16 ),
+ DEFAULT_LOAD_FACTOR );
+ putAll( t );
+ }
+
+ // internal utilities
+
+ /**
+ * Value representing null keys inside tables.
+ */
+ private static final Object NULL_KEY = new Object();
+
+ /**
+ * Use NULL_KEY for key if it is null.
+ */
+ private static <T> T maskNull(T key) {
+ return ( key == null ?
+ (T) NULL_KEY : //I don't think there is a better way
+ key );
+ }
+
+ /**
+ * Return internal representation of null key back to caller as null
+ */
+ private static <T> T unmaskNull(T key) {
+ return ( key == NULL_KEY ?
+ null :
+ key );
+ }
+
+ /**
+ * Return a hash code for non-null Object x.
+ */
+ int hash(Object x) {
+ int h = System.identityHashCode( x );
+ return h - ( h << 7 ); // that is, -127 * h
+ }
+
+ /**
+ * Return index for hash code h.
+ */
+ static int indexFor(int h, int length) {
+ return h & ( length - 1 );
+ }
+
+ /**
+ * Expunge stale entries from the table.
+ */
+ private void expungeStaleEntries() {
+ Object r;
+ while ( ( r = queue.poll() ) != null ) {
+ Entry e = (Entry) r;
+ int h = e.hash;
+ int i = indexFor( h, table.length );
+
+ Entry prev = table[i];
+ Entry p = prev;
+ while ( p != null ) {
+ Entry next = p.next;
+ if ( p == e ) {
+ if ( prev == e )
+ table[i] = next;
+ else
+ prev.next = next;
+ e.next = null; // Help GC
+ e.value = null; // " "
+ size--;
+ break;
+ }
+ prev = p;
+ p = next;
+ }
+ }
+ }
+
+ /**
+ * Return the table after first expunging stale entries
+ */
+ private Entry<K,V>[] getTable() {
+ expungeStaleEntries();
+ return table;
+ }
+
+ /**
+ * Returns the number of key-value mappings in this map.
+ * This result is a snapshot, and may not reflect unprocessed
+ * entries that will be removed before next attempted access
+ * because they are no longer referenced.
+ */
+ public int size() {
+ if ( size == 0 )
+ return 0;
+ expungeStaleEntries();
+ return size;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains no key-value mappings.
+ * This result is a snapshot, and may not reflect unprocessed
+ * entries that will be removed before next attempted access
+ * because they are no longer referenced.
+ */
+ public boolean isEmpty() {
+ return size() == 0;
+ }
+
+ /**
+ * Returns the value to which the specified key is mapped in this weak
+ * hash map, or <tt>null</tt> if the map contains no mapping for
+ * this key. A return value of <tt>null</tt> does not <i>necessarily</i>
+ * indicate that the map contains no mapping for the key; it is also
+ * possible that the map explicitly maps the key to <tt>null</tt>. The
+ * <tt>containsKey</tt> method may be used to distinguish these two
+ * cases.
+ *
+ * @param key the key whose associated value is to be returned.
+ * @return the value to which this map maps the specified key, or
+ * <tt>null</tt> if the map contains no mapping for this key.
+ * @see #put(Object,Object)
+ */
+ public V get(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry<K,V>[] tab = getTable();
+ int index = indexFor( h, tab.length );
+ Entry<K,V> e = tab[index];
+ while ( e != null ) {
+ if ( e.hash == h && k == e.get() )
+ return e.value;
+ e = e.next;
+ }
+ return null;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains a mapping for the
+ * specified key.
+ *
+ * @param key The key whose presence in this map is to be tested
+ * @return <tt>true</tt> if there is a mapping for <tt>key</tt>;
+ * <tt>false</tt> otherwise
+ */
+ public boolean containsKey(Object key) {
+ return getEntry( key ) != null;
+ }
+
+ /**
+ * Returns the entry associated with the specified key in the HashMap.
+ * Returns null if the HashMap contains no mapping for this key.
+ */
+ Entry<K,V> getEntry(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry<K,V>[] tab = getTable();
+ int index = indexFor( h, tab.length );
+ Entry<K,V> e = tab[index];
+ while ( e != null && !( e.hash == h && k == e.get() ) )
+ e = e.next;
+ return e;
+ }
+
+ /**
+ * Associates the specified value with the specified key in this map.
+ * If the map previously contained a mapping for this key, the old
+ * value is replaced.
+ *
+ * @param key key with which the specified value is to be associated.
+ * @param value value to be associated with the specified key.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the HashMap previously associated
+ * <tt>null</tt> with the specified key.
+ */
+ public V put(K key, V value) {
+ K k = maskNull( key );
+ int h = hash( k );
+ Entry<K,V>[] tab = getTable();
+ int i = indexFor( h, tab.length );
+
+ for ( Entry<K,V> e = tab[i]; e != null; e = e.next ) {
+ if ( h == e.hash && k == e.get() ) {
+ V oldValue = e.value;
+ if ( value != oldValue )
+ e.value = value;
+ return oldValue;
+ }
+ }
+
+ modCount++;
+ tab[i] = new Entry<K,V>( k, value, queue, h, tab[i] );
+ if ( ++size >= threshold )
+ resize( tab.length * 2 );
+ return null;
+ }
+
+ /**
+ * Rehashes the contents of this map into a new <tt>HashMap</tt> instance
+ * with a larger capacity. This method is called automatically when the
+ * number of keys in this map exceeds its capacity and load factor.
+ * <p/>
+ * Note that this method is a no-op if it's called with newCapacity ==
+ * 2*MAXIMUM_CAPACITY (which is Integer.MIN_VALUE).
+ *
+ * @param newCapacity the new capacity, MUST be a power of two.
+ */
+ void resize(int newCapacity) {
+ // assert (newCapacity & -newCapacity) == newCapacity; // power of 2
+
+ Entry<K,V>[] oldTable = getTable();
+ int oldCapacity = oldTable.length;
+
+ // check if needed
+ if ( size < threshold || oldCapacity > newCapacity )
+ return;
+
+ Entry<K,V>[] newTable = new Entry[newCapacity];
+
+ transfer( oldTable, newTable );
+ table = newTable;
+
+ /*
+ * If ignoring null elements and processing ref queue caused massive
+ * shrinkage, then restore old table. This should be rare, but avoids
+ * unbounded expansion of garbage-filled tables.
+ */
+ if ( size >= threshold / 2 ) {
+ threshold = (int) ( newCapacity * loadFactor );
+ }
+ else {
+ expungeStaleEntries();
+ transfer( newTable, oldTable );
+ table = oldTable;
+ }
+ }
+
+ /**
+ * Transfer all entries from src to dest tables
+ */
+ private void transfer(Entry<K,V>[] src, Entry<K,V>[] dest) {
+ for ( int j = 0; j < src.length; ++j ) {
+ Entry<K,V> e = src[j];
+ src[j] = null;
+ while ( e != null ) {
+ Entry<K,V> next = e.next;
+ K key = e.get();
+ if ( key == null ) {
+ e.next = null; // Help GC
+ e.value = null; // " "
+ size--;
+ }
+ else {
+ int i = indexFor( e.hash, dest.length );
+ e.next = dest[i];
+ dest[i] = e;
+ }
+ e = next;
+ }
+ }
+ }
+
+ /**
+ * Copies all of the mappings from the specified map to this map. These
+ * mappings will replace any mappings that this map had for any of the
+ * keys currently in the specified map.<p>
+ *
+ * @param t mappings to be stored in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public void putAll(Map<? extends K, ? extends V> t) {
+ // Expand enough to hold t's elements without resizing.
+ int n = t.size();
+ if ( n == 0 )
+ return;
+ if ( n >= threshold ) {
+ n = (int) ( n / loadFactor + 1 );
+ if ( n > MAXIMUM_CAPACITY )
+ n = MAXIMUM_CAPACITY;
+ int capacity = table.length;
+ while ( capacity < n )
+ capacity <<= 1;
+ resize( capacity );
+ }
+
+ for ( Iterator i = t.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<K,V> e = (Map.Entry<K,V>) i.next(); //FIXME should not have to cast
+ put( e.getKey(), e.getValue() );
+ }
+ }
+
+ /**
+ * Removes the mapping for this key from this map if present.
+ *
+ * @param key key whose mapping is to be removed from the map.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the map previously associated <tt>null</tt>
+ * with the specified key.
+ */
+ public V remove(Object key) {
+ Object k = maskNull( key );
+ int h = hash( k );
+ Entry<K,V>[] tab = getTable();
+ int i = indexFor( h, tab.length );
+ Entry<K,V> prev = tab[i];
+ Entry<K,V> e = prev;
+
+ while ( e != null ) {
+ Entry<K,V> next = e.next;
+ if ( h == e.hash && k == e.get() ) {
+ modCount++;
+ size--;
+ if ( prev == e )
+ tab[i] = next;
+ else
+ prev.next = next;
+ return e.value;
+ }
+ prev = e;
+ e = next;
+ }
+
+ return null;
+ }
+
+
+ /**
+ * Special version of remove needed by Entry set
+ */
+ Entry removeMapping(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return null;
+ Entry[] tab = getTable();
+ Map.Entry entry = (Map.Entry) o;
+ Object k = maskNull( entry.getKey() );
+ int h = hash( k );
+ int i = indexFor( h, tab.length );
+ Entry prev = tab[i];
+ Entry e = prev;
+
+ while ( e != null ) {
+ Entry next = e.next;
+ if ( h == e.hash && e.equals( entry ) ) {
+ modCount++;
+ size--;
+ if ( prev == e )
+ tab[i] = next;
+ else
+ prev.next = next;
+ return e;
+ }
+ prev = e;
+ e = next;
+ }
+
+ return null;
+ }
+
+ /**
+ * Removes all mappings from this map.
+ */
+ public void clear() {
+ // clear out ref queue. We don't need to expunge entries
+ // since table is getting cleared.
+ while ( queue.poll() != null )
+ ;
+
+ modCount++;
+ Entry tab[] = table;
+ for ( int i = 0; i < tab.length; ++i )
+ tab[i] = null;
+ size = 0;
+
+ // Allocation of array may have caused GC, which may have caused
+ // additional entries to go stale. Removing these entries from the
+ // reference queue will make them eligible for reclamation.
+ while ( queue.poll() != null )
+ ;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ *
+ * @param value value whose presence in this map is to be tested.
+ * @return <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ */
+ public boolean containsValue(Object value) {
+ if ( value == null )
+ return containsNullValue();
+
+ Entry tab[] = getTable();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( value.equals( e.value ) )
+ return true;
+ return false;
+ }
+
+ /**
+ * Special-case code for containsValue with null argument
+ */
+ private boolean containsNullValue() {
+ Entry tab[] = getTable();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( e.value == null )
+ return true;
+ return false;
+ }
+
+ /**
+ * Removes all entries having the given value.
+ * Intended to avoid ConcurrentModificationException: callers are expected
+ * not to add a key mapped to the value while it is being removed.
+ *
+ * @param value value whose mappings are to be removed from this map.
+ * @return <tt>true</tt> if one or more mappings were removed.
+ */
+ public boolean removeValue(Object value) {
+ if ( value == null )
+ return removeNullValue();
+
+ Entry tab[] = getTable();
+ Set keys = new HashSet();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( value.equals( e.value ) )
+ keys.add( e.getKey() );
+ for ( Object key : keys ) remove( key );
+ return !keys.isEmpty();
+ }
+
+ /**
+ * Special-case code for removeValue with null argument
+ */
+ private boolean removeNullValue() {
+ Entry tab[] = getTable();
+ Set keys = new HashSet();
+ for ( int i = tab.length; i-- > 0; )
+ for ( Entry e = tab[i]; e != null; e = e.next )
+ if ( e.value == null )
+ keys.add( e.getKey() );
+ for ( Object key : keys ) remove( key );
+ return !keys.isEmpty();
+ }
+
+ /**
+ * The entries in this hash table extend WeakReference, using its main ref
+ * field as the key.
+ */
+ private static class Entry<K,V> extends WeakReference<K> implements Map.Entry<K,V> {
+ private V value;
+ private final int hash;
+ private Entry<K,V> next;
+
+ /**
+ * Create new entry.
+ */
+ Entry(K key, V value, ReferenceQueue queue,
+ int hash, Entry<K,V> next) {
+ super( key, queue );
+ this.value = value;
+ this.hash = hash;
+ this.next = next;
+ }
+
+ public K getKey() {
+ return WeakIdentityHashMap.unmaskNull( this.get() );
+ }
+
+ public V getValue() {
+ return value;
+ }
+
+ public V setValue(V newValue) {
+ V oldValue = value;
+ value = newValue;
+ return oldValue;
+ }
+
+ public boolean equals(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+ Map.Entry e = (Map.Entry) o;
+ Object k1 = getKey();
+ Object k2 = e.getKey();
+ if ( k1 == k2 ) {
+ Object v1 = getValue();
+ Object v2 = e.getValue();
+ if ( v1 == v2 || ( v1 != null && v1.equals( v2 ) ) )
+ return true;
+ }
+ return false;
+ }
+
+ public int hashCode() {
+ Object k = getKey();
+ Object v = getValue();
+ return ( ( k == null ?
+ 0 :
+ System.identityHashCode( k ) ) ^
+ ( v == null ?
+ 0 :
+ v.hashCode() ) );
+ }
+
+ public String toString() {
+ return getKey() + "=" + getValue();
+ }
+ }
+
+ private abstract class HashIterator<E> implements Iterator<E> {
+ int index;
+ Entry<K,V> entry = null;
+ Entry<K,V> lastReturned = null;
+ int expectedModCount = modCount;
+
+ /**
+ * Strong reference needed to avoid disappearance of key
+ * between hasNext and next
+ */
+ Object nextKey = null;
+
+ /**
+ * Strong reference needed to avoid disappearance of key
+ * between nextEntry() and any use of the entry
+ */
+ Object currentKey = null;
+
+ HashIterator() {
+ index = ( size() != 0 ?
+ table.length :
+ 0 );
+ }
+
+ public boolean hasNext() {
+ Entry[] t = table;
+
+ while ( nextKey == null ) {
+ Entry e = entry;
+ int i = index;
+ while ( e == null && i > 0 )
+ e = t[--i];
+ entry = e;
+ index = i;
+ if ( e == null ) {
+ currentKey = null;
+ return false;
+ }
+ nextKey = e.get(); // hold on to key in strong ref
+ if ( nextKey == null )
+ entry = entry.next;
+ }
+ return true;
+ }
+
+ /**
+ * The common parts of next() across different types of iterators
+ */
+ protected Entry<K,V> nextEntry() {
+ if ( modCount != expectedModCount )
+ throw new ConcurrentModificationException();
+ if ( nextKey == null && !hasNext() )
+ throw new NoSuchElementException();
+
+ lastReturned = entry;
+ entry = entry.next;
+ currentKey = nextKey;
+ nextKey = null;
+ return lastReturned;
+ }
+
+ public void remove() {
+ if ( lastReturned == null )
+ throw new IllegalStateException();
+ if ( modCount != expectedModCount )
+ throw new ConcurrentModificationException();
+
+ WeakIdentityHashMap.this.remove( currentKey );
+ expectedModCount = modCount;
+ lastReturned = null;
+ currentKey = null;
+ }
+
+ }
+
+ private class ValueIterator extends HashIterator {
+ public Object next() {
+ return nextEntry().value;
+ }
+ }
+
+ private class KeyIterator extends HashIterator {
+ public Object next() {
+ return nextEntry().getKey();
+ }
+ }
+
+ private class EntryIterator extends HashIterator<Map.Entry<K,V>> {
+ public Map.Entry<K,V> next() {
+ return nextEntry();
+ }
+ }
+
+ // Views
+
+ private transient Set<Map.Entry<K,V>> entrySet = null;
+
+ /**
+ * Returns a set view of the keys contained in this map. The set is
+ * backed by the map, so changes to the map are reflected in the set, and
+ * vice-versa. The set supports element removal, which removes the
+ * corresponding mapping from this map, via the <tt>Iterator.remove</tt>,
+ * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and
+ * <tt>clear</tt> operations. It does not support the <tt>add</tt> or
+ * <tt>addAll</tt> operations.
+ *
+ * @return a set view of the keys contained in this map.
+ */
+ public Set keySet() {
+ Set ks = keySet;
+ return ( ks != null ?
+ ks :
+ ( keySet = new KeySet() ) );
+ }
+
+ private class KeySet extends AbstractSet {
+ public Iterator iterator() {
+ return new KeyIterator();
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public boolean contains(Object o) {
+ return containsKey( o );
+ }
+
+ public boolean remove(Object o) {
+ if ( containsKey( o ) ) {
+ WeakIdentityHashMap.this.remove( o );
+ return true;
+ }
+ else
+ return false;
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray( a );
+ }
+ }
+
+ /**
+ * Returns a collection view of the values contained in this map. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from this map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the values contained in this map.
+ */
+ public Collection values() {
+ Collection vs = values;
+ return ( vs != null ?
+ vs :
+ ( values = new Values() ) );
+ }
+
+ private class Values extends AbstractCollection {
+ public Iterator iterator() {
+ return new ValueIterator();
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public boolean contains(Object o) {
+ return containsValue( o );
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( i.next() );
+ return c.toArray( a );
+ }
+ }
+
+ /**
+ * Returns a collection view of the mappings contained in this map. Each
+ * element in the returned collection is a <tt>Map.Entry</tt>. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from the map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the mappings contained in this map.
+ * @see java.util.Map.Entry
+ */
+ public Set<Map.Entry<K,V>> entrySet() {
+ Set<Map.Entry<K,V>> es = entrySet;
+ return ( es != null ?
+ es :
+ ( entrySet = new EntrySet() ) );
+ }
+
+ private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
+ public Iterator<Map.Entry<K,V>> iterator() {
+ return new EntryIterator();
+ }
+
+ public boolean contains(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+ Map.Entry e = (Map.Entry) o;
+ Object k = e.getKey();
+ Entry candidate = getEntry( e.getKey() );
+ return candidate != null && candidate.equals( e );
+ }
+
+ public boolean remove(Object o) {
+ return removeMapping( o ) != null;
+ }
+
+ public int size() {
+ return WeakIdentityHashMap.this.size();
+ }
+
+ public void clear() {
+ WeakIdentityHashMap.this.clear();
+ }
+
+ public Object[] toArray() {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( new SimpleEntry( (Map.Entry) i.next() ) );
+ return c.toArray();
+ }
+
+ public Object[] toArray(Object a[]) {
+ Collection c = new ArrayList( size() );
+ for ( Iterator i = iterator(); i.hasNext(); )
+ c.add( new SimpleEntry( (Map.Entry) i.next() ) );
+ return c.toArray( a );
+ }
+ }
+
+ static class SimpleEntry implements Map.Entry {
+ Object key;
+ Object value;
+
+ public SimpleEntry(Object key, Object value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public SimpleEntry(Map.Entry e) {
+ this.key = e.getKey();
+ this.value = e.getValue();
+ }
+
+ public Object getKey() {
+ return key;
+ }
+
+ public Object getValue() {
+ return value;
+ }
+
+ public Object setValue(Object value) {
+ Object oldValue = this.value;
+ this.value = value;
+ return oldValue;
+ }
+
+ public boolean equals(Object o) {
+ if ( !( o instanceof Map.Entry ) )
+ return false;
+ Map.Entry e = (Map.Entry) o;
+ return eq( key, e.getKey() ) && eq( value, e.getValue() );
+ }
+
+ public int hashCode() {
+ Object v;
+ return ( ( key == null ) ?
+ 0 :
+ key.hashCode() ) ^
+ ( ( value == null ) ?
+ 0 :
+ value.hashCode() );
+ }
+
+ public String toString() {
+ return key + "=" + value;
+ }
+
+ private static boolean eq(Object o1, Object o2) {
+ return ( o1 == null ?
+ o2 == null :
+ o1.equals( o2 ) );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/WeakIdentityHashMap.java
___________________________________________________________________
Name: svn:keywords
+ Id
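For readers skimming the diff, a minimal usage sketch of the WeakIdentityHashMap class added above (illustrative only, not part of the commit; the sketch class name is made up). Keys are hashed with System.identityHashCode, compared with ==, and held through weak references, so an entry disappears once its key becomes unreachable:

    import org.hibernate.search.util.WeakIdentityHashMap;

    public class WeakIdentityHashMapSketch {
        public static void main(String[] args) {
            WeakIdentityHashMap<String, Integer> map = new WeakIdentityHashMap<String, Integer>();
            String k1 = new String( "key" );
            String k2 = new String( "key" ); // equals( k1 ) but a distinct instance
            map.put( k1, 1 );
            // lookups compare keys by identity, so the equal-but-distinct instance misses:
            System.out.println( map.get( k1 ) ); // 1
            System.out.println( map.get( k2 ) ); // null
            // keys are only weakly referenced: once k1 becomes unreachable and is
            // collected, its entry is expunged lazily on a later map access.
        }
    }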
Added: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/XMLHelper.java
===================================================================
--- search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/XMLHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/XMLHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,59 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.util;
+
+import java.io.ByteArrayInputStream;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * A utility class to help with XML parsing.
+ *
+ * @author Lukasz Moren
+ */
+public class XMLHelper {
+
+
+ /**
+ * Converts a String representing an XML snippet into an {@link org.w3c.dom.Element}.
+ *
+ * @param xml snippet as a string
+ *
+ * @return a DOM Element
+ *
+ * @throws Exception if unable to parse the String or if it doesn't contain valid XML.
+ */
+ public static Element elementFromString(String xml) throws Exception {
+ ByteArrayInputStream bais = new ByteArrayInputStream( xml.getBytes( "UTF-8" ) );
+ DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+ DocumentBuilder builder = factory.newDocumentBuilder();
+ Document document = builder.parse( bais );
+ bais.close();
+ return document.getDocumentElement();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/main/java/org/hibernate/search/util/XMLHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
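Likewise, a minimal sketch of the XMLHelper utility added above (illustrative only, not part of the commit; the XML snippet and the sketch class are arbitrary):

    import org.w3c.dom.Element;
    import org.hibernate.search.util.XMLHelper;

    public class XMLHelperSketch {
        public static void main(String[] args) throws Exception {
            Element e = XMLHelper.elementFromString( "<filter name=\"ngram\"><param value=\"3\"/></filter>" );
            System.out.println( e.getTagName() );            // filter
            System.out.println( e.getAttribute( "name" ) );  // ngram
        }
    }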
Property changes on: search/trunk/hibernate-search/src/main/javadoc
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/main/javadoc/jdstyle.css
===================================================================
--- search/trunk/hibernate-search/src/main/javadoc/jdstyle.css (rev 0)
+++ search/trunk/hibernate-search/src/main/javadoc/jdstyle.css 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,117 @@
+/* Javadoc style sheet */
+
+/* Define colors, fonts and other style attributes here to override the defaults */
+
+/* Page background color */
+body { font-family: Arial;
+ background-color: white;
+ font-size: 10pt;
+ }
+td { font-family: Arial;
+ font-size: 10pt;
+ }
+/* Table colors */
+.TableHeadingColor { background: #F4F4F4 }
+.TableSubHeadingColor { background: #F4F4F4 }
+.TableRowColor { background: #FFFFFF }
+
+/* Font used in left-hand frame lists */
+.FrameTitleFont { font-size: normal; font-family: Arial }
+.FrameHeadingFont { font-size: normal; font-family: Arial }
+.FrameItemFont { font-size: normal; font-family: Arial }
+
+/* Example of smaller, sans-serif font in frames */
+/* .FrameItemFont { font-size: 10pt; font-family: Helvetica, Arial, sans-serif } */
+
+/* Navigation bar fonts and colors */
+.NavBarCell1 { background-color:#F4F4F4;}
+.NavBarCell1Rev { background-color:silver;}
+
+.NavBarFont1 { font-family: Arial, Helvetica, sans-serif; color:#000000;}
+.NavBarFont1Rev { font-family: Arial, Helvetica, sans-serif; color:#FFFFFF;}
+
+.NavBarCell2 { font-family: Arial, Helvetica, sans-serif; background-color:#FFFFFF;}
+.NavBarCell3 { font-family: Arial, Helvetica, sans-serif; background-color:#FFFFFF;}
+
+A {
+ color: #003399;
+}
+
+A:active {
+ color: #003399;
+}
+
+A:visited {
+ color: #888888;
+}
+
+P, OL, UL, LI, DL, DT, DD, BLOCKQUOTE {
+ color: #000000;
+}
+
+TD, TH, SPAN {
+ color: #000000;
+}
+
+BLOCKQUOTE {
+ margin-right: 0px;
+}
+
+
+/*H1, H2, H3, H4, H5, H6 {
+ color: #000000;
+ font-weight:500;
+ margin-top:10px;
+ padding-top:15px;
+}
+
+H1 { font-size: 150%; }
+H2 { font-size: 140%; }
+H3 { font-size: 110%; font-weight: bold; }
+H4 { font-size: 110%; font-weight: bold;}
+H5 { font-size: 100%; font-style: italic; }
+H6 { font-size: 100%; font-style: italic; }*/
+
+TT {
+ font-size: 90%;
+ font-family: "Courier New", Courier, monospace;
+ color: #000000;
+}
+
+PRE {
+ font-size: 90%;
+ padding: 5px;
+ border-style: solid;
+ border-width: 1px;
+ border-color: #CCCCCC;
+ background-color: #F4F4F4;
+}
+
+UL, OL, LI {
+ list-style: disc;
+}
+
+HR {
+ width: 100%;
+ height: 1px;
+ background-color: #CCCCCC;
+ border-width: 0px;
+ padding: 0px;
+ color: #CCCCCC;
+}
+
+.variablelist {
+ padding-top: 10px;
+ padding-bottom: 10px;
+ margin: 0;
+}
+
+.itemizedlist, UL {
+ padding-top: 0;
+ padding-bottom: 0;
+ margin: 0;
+}
+
+.term {
+ font-weight:bold;
+}
Added: search/trunk/hibernate-search/src/main/javadoc/package.html
===================================================================
--- search/trunk/hibernate-search/src/main/javadoc/package.html (rev 0)
+++ search/trunk/hibernate-search/src/main/javadoc/package.html 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1 @@
+<body></body>
Property changes on: search/trunk/hibernate-search/src/test/java/org
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/AlternateDocument.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/AlternateDocument.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/AlternateDocument.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,100 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Boost;
+
+/**
+ * Example of two entities mapped to the same index.
+ *
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Documents")
+public class AlternateDocument {
+ private Long id;
+ private String title;
+ private String summary;
+ private String text;
+
+ AlternateDocument() {
+ }
+
+ public AlternateDocument(Long id, String title, String summary, String text) {
+ super();
+ this.id = id;
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ @Id
+ @DocumentId()
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Field( name = "alt_title", store = Store.YES, index = Index.TOKENIZED )
+ @Boost(2)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field( name="Abstract", store = Store.NO, index = Index.TOKENIZED )
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ @Lob
+ @Field( store = Store.NO, index = Index.TOKENIZED )
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/AlternateDocument.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Clock.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Clock.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Clock.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Clock {
+ private Integer id;
+ private String brand;
+
+ public Clock(Integer id, String brand) {
+ this.id = id;
+ this.brand = brand;
+ }
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public String getBrand() {
+ return brand;
+ }
+
+ public void setBrand(String brand) {
+ this.brand = brand;
+ }
+
+ @Id @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Clock.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Document.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Document.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Document.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Indexed(index = "Documents")
+public class Document {
+ private Long id;
+ private String title;
+ private String summary;
+ private String text;
+
+ Document() {
+ }
+
+ public Document(String title, String summary, String text) {
+ super();
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Field( store = Store.YES, index = Index.TOKENIZED )
+ @Boost(2)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field( name="Abstract", store = Store.NO, index = Index.TOKENIZED )
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ @Lob
+ @Field( store = Store.NO, index = Index.TOKENIZED )
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/Document.java
___________________________________________________________________
Name: svn:keywords
+ Id
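Document above and AlternateDocument earlier both declare @Indexed(index = "Documents"), so they share one physical Lucene index while staying separately queryable by class; RamDirectoryTest below exercises exactly this. A minimal, illustrative sketch (not part of the commit; it assumes a Session obtained from a SessionFactory configured as in the tests below, and the result comments assume a single matching AlternateDocument is indexed):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermQuery;
    import org.hibernate.Session;
    import org.hibernate.search.FullTextSession;
    import org.hibernate.search.Search;
    import org.hibernate.search.test.AlternateDocument;
    import org.hibernate.search.test.Document;

    public class SharedIndexSketch {
        public static void query(Session session) {
            FullTextSession fts = Search.getFullTextSession( session );
            TermQuery q = new TermQuery( new Term( "alt_title", "hibernate" ) );
            // only AlternateDocument maps the alt_title field, so restricting the
            // query to Document.class finds nothing even though both entities
            // live in the same "Documents" index:
            int docOnly = fts.createFullTextQuery( q, Document.class ).list().size();                        // 0
            int both = fts.createFullTextQuery( q, Document.class, AlternateDocument.class ).list().size();  // 1
            System.out.println( docOnly + " / " + both );
        }
    }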
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/FSDirectoryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/FSDirectoryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,227 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+
+import org.hibernate.Session;
+import org.hibernate.search.Environment;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * @author Gavin King
+ */
+public class FSDirectoryTest extends SearchTestCase {
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for (File file : files) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ public void testEventIntegration() throws Exception {
+
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ Directory dir = FSDirectory.open( new File( getBaseIndexDir(), "Documents" ) );
+ try {
+ IndexReader reader = IndexReader.open( dir, true );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ TermDocs docs = reader.termDocs( new Term( "Abstract", "hibernate" ) );
+ assertTrue( docs.next() );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ docs = reader.termDocs( new Term( "title", "action" ) );
+ assertTrue( docs.next() );
+ doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ assertEquals( "1", doc.getField( "id" ).stringValue() );
+ }
+ finally {
+ reader.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ Document entity = (Document) s.get( Document.class, Long.valueOf( 1 ) );
+ entity.setSummary( "Object/relational mapping with EJB3" );
+ s.persist( new Document( "Seam in Action", "", "blah blah blah blah" ) );
+ s.getTransaction().commit();
+ s.close();
+
+ reader = IndexReader.open( dir, true );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 2, num );
+ TermDocs docs = reader.termDocs( new Term( "Abstract", "ejb" ) );
+ assertTrue( docs.next() );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ }
+ finally {
+ reader.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( entity );
+ s.getTransaction().commit();
+ s.close();
+
+ reader = IndexReader.open( dir, true );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ TermDocs docs = reader.termDocs( new Term( "title", "seam" ) );
+ assertTrue( docs.next() );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ assertEquals( "2", doc.getField( "id" ).stringValue() );
+ }
+ finally {
+ reader.close();
+ }
+ }
+ finally {
+ dir.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.createCriteria( Document.class ).uniqueResult() );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ public void testBoost() throws Exception {
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object and Relational", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Object and Relational", "Hibernate in Action", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ FSDirectory dir = FSDirectory.open( new File( getBaseIndexDir(), "Documents" ) );
+ IndexSearcher searcher = new IndexSearcher( dir, true );
+ try {
+ QueryParser qp = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query = qp.parse( "title:Action OR Abstract:Action" );
+ TopDocs hits = searcher.search( query, 1000 );
+ assertEquals( 2, hits.totalHits );
+ assertTrue( hits.scoreDocs[0].score == 2 * hits.scoreDocs[1].score );
+ org.apache.lucene.document.Document doc = searcher.doc( 0 );
+ assertEquals( "Hibernate in Action", doc.get( "title" ) );
+ }
+ finally {
+ searcher.close();
+ dir.close();
+ }
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ List list = s.createQuery( "from Document" ).list();
+ for (Document document : (List<Document>) list) {
+ s.delete( document );
+ }
+ s.getTransaction().commit();
+ s.close();
+ getSessions().close(); //run the searchfactory.close() operations
+ }
+
+ public void testSearchOnDeletedIndex() throws Exception {
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist( new Document( "Hibernate Search in Action", "", "" ) );
+ s.getTransaction().commit();
+ s.close();
+
+ Directory dir = FSDirectory.open( new File( getBaseIndexDir(), "Documents" ) );
+ IndexSearcher searcher = new IndexSearcher( dir, true );
+ // deleting before search, but after IndexSearcher creation:
+ // ( fails when deleting -concurrently- to IndexSearcher initialization! )
+ FileHelper.delete( getBaseIndexDir() );
+ TermQuery query = new TermQuery( new Term( "title", "action" ) );
+ TopDocs hits = searcher.search( query, 1000 );
+ assertEquals( 1, hits.totalHits );
+ org.apache.lucene.document.Document doc = searcher.doc( 0 );
+ assertEquals( "Hibernate Search in Action", doc.get( "title" ) );
+ searcher.close();
+ dir.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Document.class
+ };
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/FSDirectoryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/PurgeTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/PurgeTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/PurgeTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,138 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.query.AlternateBook;
+import org.hibernate.search.test.query.Author;
+import org.hibernate.search.test.query.Book;
+import org.hibernate.search.test.query.Clock;
+import org.hibernate.search.test.query.Employee;
+
+/**
+ * Test the PURGE and PURGE_ALL functionality.
+ *
+ * @author John Griffin
+ */
+public class PurgeTest extends SearchTestCase {
+
+ public void testPurge() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ org.hibernate.search.test.query.Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "brand", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "brand:Seiko" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List results = hibQuery.list();
+ assertEquals("incorrect test record", 1, results.size());
+ assertEquals("incorrect test record", 1, ((Clock)results.get( 0 )).getId().intValue());
+
+ s.purge( Clock.class, ((Clock)results.get( 0 )).getId());
+
+ tx.commit();
+
+ tx = s.beginTransaction();
+
+ query = parser.parse( "brand:Festina or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ results = hibQuery.list();
+ assertEquals("incorrect test record count", 1, results.size());
+ assertEquals("incorrect test record", 2, ((Clock)results.get( 0 )).getId().intValue());
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testPurgeAll() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ org.hibernate.search.test.query.Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ clock = new Clock( 3, "Longine" );
+ s.save( clock );
+ clock = new Clock( 4, "Rolex" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "brand", SearchTestCase.stopAnalyzer );
+ s.purgeAll( Clock.class );
+
+ tx.commit();
+
+ tx = s.beginTransaction();
+
+ Query query = parser.parse( "brand:Festina or brand:Seiko or brand:Longine or brand:Rolex" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List results = hibQuery.list();
+ assertEquals("class not completely purged", 0, results.size());
+
+ query = parser.parse( "summary:Festina or summary:gloire" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ results = hibQuery.list();
+ assertEquals("incorrect class purged", 2, results.size());
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ AlternateBook.class,
+ Clock.class,
+ Author.class,
+ Employee.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/PurgeTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/RamDirectoryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/RamDirectoryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/RamDirectoryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,92 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import org.hibernate.Session;
+import org.hibernate.search.Search;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class RamDirectoryTest extends SearchTestCase {
+
+ public void testMultipleEntitiesPerIndex() throws Exception {
+
+
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ Document document =
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" );
+ s.persist(document);
+ s.flush();
+ s.persist(
+ new AlternateDocument( document.getId(), "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( 2, getDocumentNbr() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ TermQuery q = new TermQuery(new Term("alt_title", "hibernate"));
+ assertEquals( "does not properly filter", 0,
+ Search.getFullTextSession( s ).createFullTextQuery( q, Document.class ).list().size() );
+ assertEquals( "does not properly filter", 1,
+ Search.getFullTextSession( s ).createFullTextQuery( q, Document.class, AlternateDocument.class ).list().size() );
+ s.delete( s.get( AlternateDocument.class, document.getId() ) );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( 1, getDocumentNbr() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.createCriteria( Document.class ).uniqueResult() );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ private int getDocumentNbr() throws Exception {
+ IndexReader reader = IndexReader.open( getDirectory( Document.class ), false );
+ try {
+ return reader.numDocs();
+ }
+ finally {
+ reader.close();
+ }
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[]{
+ Document.class,
+ AlternateDocument.class
+ };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/RamDirectoryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SearchTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SearchTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SearchTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,198 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.io.File;
+import java.io.InputStream;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.KeywordAnalyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Version;
+import org.slf4j.Logger;
+
+import org.hibernate.HibernateException;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.impl.SessionFactoryImpl;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.tool.hbm2ddl.SchemaExport;
+
+/**
+ * Base class for Hibernate Search unit tests.
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class SearchTestCase extends TestCase {
+
+ private static final Logger log = org.hibernate.search.util.LoggerFactory.make();
+
+ public static Analyzer standardAnalyzer = new StandardAnalyzer( getTargetLuceneVersion() );
+ public static Analyzer stopAnalyzer = new StopAnalyzer( getTargetLuceneVersion() );
+ public static Analyzer simpleAnalyzer = new SimpleAnalyzer();
+ public static Analyzer keywordAnalyzer = new KeywordAnalyzer();
+
+ private static File indexDir;
+
+ private SearchFactory searchFactory;
+
+ static {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
+ buildDir = ".";
+ }
+ File current = new File( buildDir );
+ indexDir = new File( current, "indextemp" );
+ log.debug( "Using {} as index directory.", indexDir.getAbsolutePath() );
+ }
+
+ protected void setUp() throws Exception {
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ ensureIndexesAreEmpty();
+ }
+
+ protected void tearDown() throws Exception {
+ SchemaExport export = new SchemaExport( cfg );
+ export.drop( false, true );
+ searchFactory = null;
+ }
+
+ protected Directory getDirectory(Class<?> clazz) {
+ return getLuceneEventListener().getSearchFactoryImplementor().getDirectoryProviders( clazz )[0].getDirectory();
+ }
+
+ private FullTextIndexEventListener getLuceneEventListener() {
+ PostInsertEventListener[] listeners = ( ( SessionFactoryImpl ) getSessions() ).getEventListeners()
+ .getPostInsertEventListeners();
+ FullTextIndexEventListener listener = null;
+ //FIXME this sucks since we mandate the event listener use
+ for ( PostInsertEventListener candidate : listeners ) {
+ if ( candidate instanceof FullTextIndexEventListener ) {
+ listener = ( FullTextIndexEventListener ) candidate;
+ break;
+ }
+ }
+ if ( listener == null ) {
+ throw new HibernateException( "Lucene event listener not initialized" );
+ }
+ return listener;
+ }
+
+ protected void ensureIndexesAreEmpty() {
+ if ( "jms".equals( getCfg().getProperty( "hibernate.search.worker.backend" ) ) ) {
+ log.debug( "JMS based test. Skipping index emptying" );
+ return;
+ }
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx;
+ tx = s.beginTransaction();
+ for ( Class<?> clazz : getMappings() ) {
+ if ( clazz.getAnnotation( Indexed.class ) != null ) {
+ s.purgeAll( clazz );
+ }
+ }
+ tx.commit();
+ s.close();
+ }
+
+ protected SearchFactory getSearchFactory() {
+ if ( searchFactory == null ) {
+ Session session = openSession();
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
+ searchFactory = fullTextSession.getSearchFactory();
+ fullTextSession.close();
+ }
+ return searchFactory;
+ }
+
+ protected void configure(Configuration cfg) {
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( "hibernate.search.default.indexBase", indexDir.getAbsolutePath() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
+ cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
+ }
+
+ protected File getBaseIndexDir() {
+ return indexDir;
+ }
+
+ protected void buildSessionFactory(Class<?>[] classes, String[] packages, String[] xmlFiles) throws Exception {
+ if ( getSessions() != null ) {
+ getSessions().close();
+ }
+ try {
+ setCfg( new AnnotationConfiguration() );
+ configure( cfg );
+ if ( recreateSchema() ) {
+ cfg.setProperty( org.hibernate.cfg.Environment.HBM2DDL_AUTO, "create-drop" );
+ }
+ for ( String aPackage : packages ) {
+ ( ( AnnotationConfiguration ) getCfg() ).addPackage( aPackage );
+ }
+ for ( Class<?> aClass : classes ) {
+ ( ( AnnotationConfiguration ) getCfg() ).addAnnotatedClass( aClass );
+ }
+ for ( String xmlFile : xmlFiles ) {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFile );
+ getCfg().addInputStream( is );
+ }
+ setDialect( Dialect.getDialect() );
+ setSessions( getCfg().buildSessionFactory( /*new TestInterceptor()*/ ) );
+ }
+ catch ( Exception e ) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+
+ protected abstract Class<?>[] getMappings();
+
+ protected String[] getAnnotatedPackages() {
+ return new String[] { };
+ }
+
+ protected static File getIndexDir() {
+ return indexDir;
+ }
+
+ public static Version getTargetLuceneVersion() {
+ return Version.LUCENE_29;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SearchTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
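A minimal sketch, not part of this commit, of how a concrete test would typically extend the SearchTestCase base class added above; the Book entity and its title field are hypothetical stand-ins for any @Indexed test entity:

    import org.apache.lucene.queryParser.QueryParser;

    import org.hibernate.Transaction;
    import org.hibernate.search.FullTextSession;
    import org.hibernate.search.Search;

    public class BookSearchTest extends SearchTestCase {

        public void testIndexAndQuery() throws Exception {
            FullTextSession s = Search.getFullTextSession( openSession() );
            Transaction tx = s.beginTransaction();
            // Book is a hypothetical @Indexed entity with a "title" field
            s.persist( new Book( "Hibernate Search in Action" ) );
            tx.commit();

            tx = s.beginTransaction();
            QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.standardAnalyzer );
            org.apache.lucene.search.Query luceneQuery = parser.parse( "hibernate" );
            assertEquals( 1, s.createFullTextQuery( luceneQuery, Book.class ).getResultSize() );
            tx.commit();
            s.close();
        }

        protected Class<?>[] getMappings() {
            return new Class[] { Book.class };
        }
    }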
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SerializationTestHelper.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SerializationTestHelper.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SerializationTestHelper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,131 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hibernate.search.test.SerializationTestHelper.Foo.TestInnerClass;
+
+import junit.framework.TestCase;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class SerializationTestHelper extends TestCase {
+
+ /**
+ * Duplicates an object using serialization: it moves
+ * state to and from a buffer. Should be used to test
+ * correct serializability.
+ * @param o The object to "clone"
+ * @return the clone.
+ * @throws IOException
+ * @throws ClassNotFoundException
+ */
+ public static Object duplicateBySerialization(Object o) throws IOException, ClassNotFoundException {
+ //Serialize to buffer:
+ java.io.ByteArrayOutputStream outStream = new java.io.ByteArrayOutputStream();
+ ObjectOutputStream objectOutStream = new ObjectOutputStream( outStream );
+ objectOutStream.writeObject( o );
+ objectOutStream.flush();
+ objectOutStream.close();
+ //buffer version of Object:
+ byte[] objectBuffer = outStream.toByteArray();
+ //deserialize to new instance:
+ java.io.ByteArrayInputStream inStream = new ByteArrayInputStream( objectBuffer );
+ ObjectInputStream objectInStream = new ObjectInputStream( inStream );
+ Object copy = objectInStream.readObject();
+ return copy;
+ }
+
+ public void testSelf() throws IOException, ClassNotFoundException {
+ Foo a = new Foo();
+ a.list.add( new TestInnerClass(30) );
+ Foo b = (Foo) duplicateBySerialization( a );
+ assertEquals( Integer.valueOf(6), a.integer);
+ assertEquals( Integer.valueOf(7), b.integer);
+ assertEquals( a.list, b.list );
+ }
+
+ static class Foo implements Serializable {
+
+ List<TestInnerClass> list = new ArrayList<TestInnerClass>();
+ transient Integer integer = Integer.valueOf( 6 );
+
+ static class TestInnerClass implements Serializable {
+ private final int v;
+
+ public TestInnerClass(int i) {
+ v = i;
+ }
+
+ public void print() {
+ System.out.println(v);
+ }
+
+ public String toString(){
+ return ""+v;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + v;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ final TestInnerClass other = (TestInnerClass) obj;
+ if (v != other.v)
+ return false;
+ return true;
+ }
+ }
+
+ private void readObject(ObjectInputStream aInputStream) throws ClassNotFoundException, IOException {
+ aInputStream.defaultReadObject();
+ integer = Integer.valueOf( 7 );
+ }
+
+ private void writeObject(ObjectOutputStream aOutputStream) throws IOException {
+ aOutputStream.defaultWriteObject();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/SerializationTestHelper.java
___________________________________________________________________
Name: svn:keywords
+ Id
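A hedged usage sketch for the helper above, outside this commit; SomeSerializableFilter and its constructor are hypothetical stand-ins for whatever class a test needs to check for serializability:

    public void testFilterSurvivesSerialization() throws Exception {
        // SomeSerializableFilter is a hypothetical Serializable class with a value-based equals()
        SomeSerializableFilter original = new SomeSerializableFilter( "keyword" );
        SomeSerializableFilter copy =
                (SomeSerializableFilter) SerializationTestHelper.duplicateBySerialization( original );
        assertEquals( original, copy );   // state is preserved across the round trip
        assertNotSame( original, copy );  // but the copy is a distinct instance
    }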
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,189 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.io.InputStream;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+
+import org.hibernate.HibernateException;
+import org.hibernate.Interceptor;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.event.FlushEventListener;
+import org.hibernate.event.def.DefaultFlushEventListener;
+import org.hibernate.search.event.FullTextIndexEventListener;
+
+/**
+ * A modified base class for tests without annotations.
+ *
+ * @author Hardy Ferentschik
+ */
+public abstract class TestCase extends junit.framework.TestCase {
+
+ protected static SessionFactory sessions;
+ protected static Configuration cfg;
+ protected static Dialect dialect;
+ protected static Class lastTestClass;
+ protected Session session;
+
+ public TestCase() {
+ super();
+ }
+
+ public TestCase(String x) {
+ super( x );
+ }
+
+ protected void buildSessionFactory(String[] xmlFiles) throws Exception {
+
+ if ( getSessions() != null ) {
+ getSessions().close();
+ }
+ try {
+ setCfg( new Configuration() );
+ configure( cfg );
+ if ( recreateSchema() ) {
+ cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
+ }
+ for ( String xmlFile : xmlFiles ) {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFile );
+ getCfg().addInputStream( is );
+ }
+ setDialect( Dialect.getDialect() );
+ setSessions( getCfg().buildSessionFactory() );
+ }
+ catch ( Exception e ) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+
+ protected void setUp() throws Exception {
+ if ( getSessions() == null || getSessions().isClosed() || lastTestClass != getClass() ) {
+ buildSessionFactory( getXmlFiles() );
+ lastTestClass = getClass();
+ }
+ }
+
+ protected void runTest() throws Throwable {
+ try {
+ super.runTest();
+ if ( session != null && session.isOpen() ) {
+ if ( session.isConnected() ) {
+ session.connection().rollback();
+ }
+ session.close();
+ session = null;
+ fail( "unclosed session" );
+ }
+ else {
+ session = null;
+ }
+ }
+ catch ( Throwable e ) {
+ try {
+ if ( session != null && session.isOpen() ) {
+ if ( session.isConnected() ) {
+ session.connection().rollback();
+ }
+ session.close();
+ }
+ }
+ catch ( Exception ignore ) {
+ }
+ try {
+ if ( sessions != null ) {
+ sessions.close();
+ sessions = null;
+ }
+ }
+ catch ( Exception ignore ) {
+ }
+ throw e;
+ }
+ }
+
+ public Session openSession() throws HibernateException {
+ session = getSessions().openSession();
+ return session;
+ }
+
+ public Session openSession(Interceptor interceptor) throws HibernateException {
+ session = getSessions().openSession( interceptor );
+ return session;
+ }
+
+ protected String[] getXmlFiles() {
+ return new String[] { };
+ }
+
+ protected void setSessions(SessionFactory sessions) {
+ TestCase.sessions = sessions;
+ }
+
+ protected SessionFactory getSessions() {
+ return sessions;
+ }
+
+ protected void setDialect(Dialect dialect) {
+ TestCase.dialect = dialect;
+ }
+
+ protected Dialect getDialect() {
+ return dialect;
+ }
+
+ protected static void setCfg(Configuration cfg) {
+ TestCase.cfg = cfg;
+ }
+
+ protected static Configuration getCfg() {
+ return cfg;
+ }
+
+ protected void configure(Configuration cfg) {
+ //need to register all event listeners explicitly:
+ cfg.setListener( "post-update", "org.hibernate.search.event.FullTextIndexEventListener" );
+ cfg.setListener( "post-insert", "org.hibernate.search.event.FullTextIndexEventListener" );
+ cfg.setListener( "post-delete", "org.hibernate.search.event.FullTextIndexEventListener" );
+ cfg.setListener( "post-collection-recreate", "org.hibernate.search.event.FullTextIndexEventListener" );
+ cfg.setListener( "post-collection-remove", "org.hibernate.search.event.FullTextIndexEventListener" );
+ cfg.setListener( "post-collection-update", "org.hibernate.search.event.FullTextIndexEventListener" );
+
+ cfg.setListeners( "flush", new FlushEventListener[]{new DefaultFlushEventListener(), new FullTextIndexEventListener()} );
+
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( org.hibernate.search.Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ protected boolean recreateSchema() {
+ return true;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
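A minimal sketch, again not part of this commit, of a test built on the annotation-free base class above; the Email entity and the Email.hbm.xml mapping file name are hypothetical:

    import org.hibernate.Session;
    import org.hibernate.Transaction;

    public class EmailIndexingTest extends org.hibernate.search.test.TestCase {

        protected String[] getXmlFiles() {
            // hbm.xml mappings replace annotated classes; the file name is hypothetical
            return new String[] { "org/hibernate/search/test/Email.hbm.xml" };
        }

        public void testInsertIsIndexed() throws Exception {
            Session s = openSession();
            Transaction tx = s.beginTransaction();
            // hypothetical entity mapped in Email.hbm.xml
            s.persist( new Email( "a subject", "a body" ) );
            tx.commit();
            s.close();
        }
    }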
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TransactionTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TransactionTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TransactionTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+
+import org.hibernate.Session;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TransactionTest extends SearchTestCase {
+
+ public void testTransactionCommit() throws Exception {
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Lucene in Action", "FullText search engine", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Hibernate Search in Action", "ORM and FullText search engine", "blah blah blah" )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( "transaction.commit() should index", 3, getDocumentNumber() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.persist(
+ new Document(
+ "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah"
+ )
+ );
+ s.flush();
+ s.getTransaction().rollback();
+ s.close();
+
+ assertEquals( "rollback() should not index", 3, getDocumentNumber() );
+
+ s = getSessions().openSession();
+ s.connection().setAutoCommit( true ); // www.hibernate.org/403.html
+ s.persist(
+ new Document(
+ "Java Persistence with Hibernate", "Object/relational mapping with Hibernate", "blah blah blah"
+ )
+ );
+ s.flush();
+ s.close();
+
+ assertEquals( "no transaction should index", 4, getDocumentNumber() );
+
+ }
+
+ private int getDocumentNumber() throws IOException {
+ IndexReader reader = IndexReader.open( getDirectory( Document.class ), false );
+ try {
+ return reader.numDocs();
+ }
+ finally {
+ reader.close();
+ }
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { Document.class };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/TransactionTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import java.io.Reader;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Token;
+
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class AbstractTestAnalyzer extends Analyzer {
+
+ protected abstract String[] getTokens();
+
+ public TokenStream tokenStream(String fieldName, Reader reader) {
+ return new InternalTokenStream();
+ }
+
+ private class InternalTokenStream extends TokenStream {
+ private int position;
+
+ public Token next(final Token reusableToken) throws IOException {
+ assert reusableToken != null;
+ if ( position >= getTokens().length ) {
+ return null;
+ }
+ else {
+ reusableToken.clear();
+ reusableToken.setTermBuffer( getTokens()[position++] );
+ reusableToken.setStartOffset( 0 );
+ reusableToken.setEndOffset( 0 );
+ return reusableToken;
+ }
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AlarmEntity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AlarmEntity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AlarmEntity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+@Entity
+@Indexed(index="idx1")
+@Analyzer(impl = Test1Analyzer.class)
+public class AlarmEntity {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED)
+ private String property;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getProperty() {
+ return property;
+ }
+ public void setProperty(String property) {
+ this.property = property;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AlarmEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AnalyzerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AnalyzerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AnalyzerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,185 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.queryParser.QueryParser;
+import org.slf4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.impl.InitContext;
+import org.hibernate.search.cfg.SearchConfigurationFromHibernateCore;
+import org.hibernate.search.engine.DocumentBuilderContainedEntity;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.util.AnalyzerUtils;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class AnalyzerTest extends SearchTestCase {
+
+ public static final Logger log = LoggerFactory.make();
+
+ public void testAnalyzerDiscriminator() throws Exception {
+ Article germanArticle = new Article();
+ germanArticle.setLanguage( "de" );
+ germanArticle.setText( "aufeinanderschl\u00FCgen" );
+ Set<Article> references = new HashSet<Article>();
+ references.add( germanArticle );
+
+
+ Article englishArticle = new Article();
+ englishArticle.setLanguage( "en" );
+ englishArticle.setText( "acknowledgment" );
+ englishArticle.setReferences( references );
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( englishArticle );
+ tx.commit();
+
+ tx = s.beginTransaction();
+
+ // at query time we use a standard analyzer. We explicitly search for tokens which can only be found if the
+ // right language-specific stemmer was used at index time
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "references.text", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "aufeinanderschlug" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ assertEquals( 1, query.getResultSize() );
+
+ parser = new QueryParser( getTargetLuceneVersion(), "text", SearchTestCase.standardAnalyzer );
+ luceneQuery = parser.parse( "acknowledg" );
+ query = s.createFullTextQuery( luceneQuery );
+ assertEquals( 1, query.getResultSize() );
+
+ tx.commit();
+ s.close();
+ }
+
+ public void testMultipleAnalyzerDiscriminatorDefinitions() {
+ SearchConfigurationFromHibernateCore searchConfig = new SearchConfigurationFromHibernateCore( cfg );
+ ReflectionManager reflectionManager = searchConfig.getReflectionManager();
+ XClass xclass = reflectionManager.toXClass( BlogEntry.class );
+ InitContext context = new InitContext( searchConfig );
+ try {
+ new DocumentBuilderContainedEntity( xclass, context, reflectionManager );
+ fail();
+ }
+ catch ( SearchException e ) {
+ assertTrue( "Wrong error message", e.getMessage().startsWith( "Multiple AnalyzerDiscriminator defined in the same class hierarchy" ));
+ }
+ }
+
+ public void testScopedAnalyzers() throws Exception {
+ MyEntity en = new MyEntity();
+ en.setEntity( "Entity" );
+ en.setField( "Field" );
+ en.setProperty( "Property" );
+ en.setComponent( new MyComponent() );
+ en.getComponent().setComponentProperty( "component property" );
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( en );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "entity:alarm" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery, MyEntity.class );
+ assertEquals( 1, query.getResultSize() );
+
+ luceneQuery = parser.parse( "property:cat" );
+ query = s.createFullTextQuery( luceneQuery, MyEntity.class );
+ assertEquals( 1, query.getResultSize() );
+
+ luceneQuery = parser.parse( "field:energy" );
+ query = s.createFullTextQuery( luceneQuery, MyEntity.class );
+ assertEquals( 1, query.getResultSize() );
+
+ luceneQuery = parser.parse( "component.componentProperty:noise" );
+ query = s.createFullTextQuery( luceneQuery, MyEntity.class );
+ assertEquals( 1, query.getResultSize() );
+
+ s.delete( query.uniqueResult() );
+ tx.commit();
+
+ s.close();
+ }
+
+ public void testScopedAnalyzersFromSearchFactory() throws Exception {
+ FullTextSession session = Search.getFullTextSession( openSession() );
+ SearchFactory searchFactory = session.getSearchFactory();
+ Analyzer analyzer = searchFactory.getAnalyzer( MyEntity.class );
+
+ // you can pass whatever you like into the analysis since the analyzers used here
+ // return the same tokens all the time. We just want to make sure that
+ // the right analyzers are used.
+ Token[] tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "entity", "" );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "alarm", "dog", "performance" } );
+
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "property", "" );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "sound", "cat", "speed" } );
+
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "field", "" );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "music", "elephant", "energy" } );
+
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "component.componentProperty", "" );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "noise", "mouse", "light" } );
+
+ // test border cases
+ try {
+ searchFactory.getAnalyzer( ( Class ) null );
+ }
+ catch ( IllegalArgumentException iae ) {
+ log.debug( "success" );
+ }
+
+ try {
+ searchFactory.getAnalyzer( String.class );
+ }
+ catch ( IllegalArgumentException iae ) {
+ log.debug( "success" );
+ }
+
+ session.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { MyEntity.class, Article.class };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/AnalyzerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Article.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Article.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Article.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,117 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.apache.solr.analysis.EnglishPorterFilterFactory;
+import org.apache.solr.analysis.GermanStemFilterFactory;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.AnalyzerDiscriminator;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class
+ )
+ }),
+ @AnalyzerDef(name = "de",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = GermanStemFilterFactory.class)
+ })
+})
+public class Article {
+
+ private Integer id;
+ private String language;
+ private String text;
+ private Set<Article> references;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES)
+ @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
+ public String getLanguage() {
+ return language;
+ }
+
+ public void setLanguage(String language) {
+ this.language = language;
+ }
+
+ @Field(store = Store.YES)
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+
+ @OneToMany(cascade = CascadeType.ALL)
+ @IndexedEmbedded(depth = 1)
+ public Set<Article> getReferences() {
+ return references;
+ }
+
+ public void setReferences(Set<Article> references) {
+ this.references = references;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Article.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/BlogEntry.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/BlogEntry.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/BlogEntry.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,117 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.apache.solr.analysis.EnglishPorterFilterFactory;
+import org.apache.solr.analysis.GermanStemFilterFactory;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.AnalyzerDiscriminator;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class
+ )
+ }),
+ @AnalyzerDef(name = "de",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = GermanStemFilterFactory.class)
+ })
+})
+@AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
+public class BlogEntry {
+
+ private Integer id;
+ private String language;
+ private String text;
+ private Set<BlogEntry> references;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES)
+ @AnalyzerDiscriminator(impl = LanguageDiscriminator.class)
+ public String getLanguage() {
+ return language;
+ }
+
+ public void setLanguage(String language) {
+ this.language = language;
+ }
+
+ @Field(store = Store.YES)
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+
+ @OneToMany(cascade = CascadeType.ALL)
+ @IndexedEmbedded(depth = 1)
+ public Set<BlogEntry> getReferences() {
+ return references;
+ }
+
+ public void setReferences(Set<BlogEntry> references) {
+ this.references = references;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/BlogEntry.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/DoubleAnalyzerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/DoubleAnalyzerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/DoubleAnalyzerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,84 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.slf4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Test for http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH-263
+ * @author Sanne Grinovero
+ */
+public class DoubleAnalyzerTest extends SearchTestCase {
+
+ public static final Logger log = LoggerFactory.make();
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { MyEntity.class, AlarmEntity.class };
+ }
+
+ public void testScopedAnalyzers() throws Exception {
+ MyEntity en = new MyEntity();
+ en.setEntity( "anyNotNull" );
+ AlarmEntity alarmEn = new AlarmEntity();
+ alarmEn.setProperty( "notNullAgain" );
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( en );
+ s.persist( alarmEn );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ {
+ Query luceneQuery = new MatchAllDocsQuery();
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ assertEquals( 2, query.getResultSize() );
+ }
+ {
+ Query luceneQuery = parser.parse( "entity:alarm" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery, MyEntity.class );
+ assertEquals( 1, query.getResultSize() );
+ }
+ {
+ Query luceneQuery = parser.parse( "property:sound" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery, AlarmEntity.class );
+ assertEquals( 0, query.getResultSize() );
+ }
+
+ tx.commit();
+ s.close();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/DoubleAnalyzerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/LanguageDiscriminator.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/LanguageDiscriminator.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/LanguageDiscriminator.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import org.hibernate.search.analyzer.Discriminator;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class LanguageDiscriminator implements Discriminator {
+
+ public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
+ if ( value == null || !( entity instanceof Article ) ) {
+ return null;
+ }
+ return (String) value;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/LanguageDiscriminator.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyComponent.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyComponent.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyComponent.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MyComponent {
+ @Field(index = Index.TOKENIZED)
+ @Analyzer(impl = Test4Analyzer.class)
+ private String componentProperty;
+
+ public String getComponentProperty() {
+ return componentProperty;
+ }
+
+ public void setComponentProperty(String componentProperty) {
+ this.componentProperty = componentProperty;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyComponent.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyEntity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyEntity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyEntity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,105 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Embedded;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="idx1")
+@Analyzer(impl = Test1Analyzer.class)
+public class MyEntity {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED)
+ private String entity;
+
+ @Field(index = Index.TOKENIZED)
+ @Analyzer(impl = Test2Analyzer.class)
+ private String property;
+
+ @Field(index = Index.TOKENIZED, analyzer = @Analyzer(impl = Test3Analyzer.class) )
+ @Analyzer(impl = Test2Analyzer.class)
+ private String field;
+
+ @IndexedEmbedded
+ @Embedded
+ private MyComponent component;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getEntity() {
+ return entity;
+ }
+
+ public void setEntity(String entity) {
+ this.entity = entity;
+ }
+
+ public String getProperty() {
+ return property;
+ }
+
+ public void setProperty(String property) {
+ this.property = property;
+ }
+
+ public String getField() {
+ return field;
+ }
+
+ public void setField(String field) {
+ this.field = field;
+ }
+
+ public MyComponent getComponent() {
+ return component;
+ }
+
+ public void setComponent(MyComponent component) {
+ this.component = component;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/MyEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test1Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test1Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test1Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class Test1Analyzer extends AbstractTestAnalyzer {
+ private final String[] tokens = { "alarm", "dog", "performance" };
+
+ protected String[] getTokens() {
+ return tokens;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test1Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test2Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test2Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test2Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class Test2Analyzer extends AbstractTestAnalyzer {
+ private final String[] tokens = { "sound", "cat", "speed" };
+
+ protected String[] getTokens() {
+ return tokens;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test2Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test3Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test3Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test3Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class Test3Analyzer extends AbstractTestAnalyzer {
+ private final String[] tokens = { "music", "elephant", "energy" };
+
+ protected String[] getTokens() {
+ return tokens;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test3Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test4Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test4Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test4Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class Test4Analyzer extends AbstractTestAnalyzer {
+ private final String[] tokens = { "noise", "mouse", "light" };
+
+ protected String[] getTokens() {
+ return tokens;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/Test4Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/AnalyzerInheritanceTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/AnalyzerInheritanceTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/AnalyzerInheritanceTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,106 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.inheritance;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.queryParser.QueryParser;
+import org.slf4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.util.AnalyzerUtils;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Test to verify HSEARCH-267.
+ *
+ * A base class defines a field as indexable without specifying an explicit analyzer. A subclass then defines an analyzer
+ * at class level. This should also be the analyzer used for indexing the field in the base class.
+ *
+ * @author Hardy Ferentschik
+ */
+public class AnalyzerInheritanceTest extends SearchTestCase {
+
+ public static final Logger log = LoggerFactory.make();
+
+ /**
+ * Try to verify that the right analyzer is used by indexing and searching.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testBySearch() throws Exception {
+ SubClass testClass = new SubClass();
+ testClass.setName( "Proca\u00EFne" );
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( testClass );
+ tx.commit();
+
+ tx = s.beginTransaction();
+
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", s.getSearchFactory().getAnalyzer( SubClass.class ) );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "name:Proca\u00EFne" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery, SubClass.class );
+ assertEquals( 1, query.getResultSize() );
+
+ luceneQuery = parser.parse( "name:Procaine" );
+ query = s.createFullTextQuery( luceneQuery, SubClass.class );
+ assertEquals( 1, query.getResultSize() );
+
+ // make sure the result is not always 1
+ luceneQuery = parser.parse( "name:foo" );
+ query = s.createFullTextQuery( luceneQuery, SubClass.class );
+ assertEquals( 0, query.getResultSize() );
+
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * Try to verify that the right analyzer is used by explicitly retrieving the analyzer from the factory.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testByAnalyzerRetrieval() throws Exception {
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Analyzer analyzer = s.getSearchFactory().getAnalyzer( SubClass.class );
+
+ Token[] tokens = AnalyzerUtils.tokensFromAnalysis(analyzer, "name", "Proca\u00EFne");
+ AnalyzerUtils.assertTokensEqual( tokens, new String[]{"Procaine"});
+
+ s.close();
+ }
+
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { SubClass.class };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/AnalyzerInheritanceTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/BaseClass.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/BaseClass.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/BaseClass.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.inheritance;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Inheritance;
+import javax.persistence.InheritanceType;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
+public abstract class BaseClass {
+
+ private Integer id;
+
+ protected String name;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+
+ @Field(name = "name", index = Index.TOKENIZED, store = Store.YES)
+ public String getName() {
+ return name;
+ }
+
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/BaseClass.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/ISOLatin1Analyzer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/ISOLatin1Analyzer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/ISOLatin1Analyzer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.inheritance;
+
+import java.io.Reader;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.ASCIIFoldingFilter;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardTokenizer;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class ISOLatin1Analyzer extends Analyzer {
+
+ public TokenStream tokenStream(String s, Reader reader) {
+ TokenStream result = new StandardTokenizer( SearchTestCase.getTargetLuceneVersion(), reader );
+ return new ASCIIFoldingFilter(result);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/ISOLatin1Analyzer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/SubClass.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/SubClass.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/SubClass.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.inheritance;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+@Analyzer(impl = ISOLatin1Analyzer.class)
+public class SubClass extends BaseClass {
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/inheritance/SubClass.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilter.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.solr;
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+
+/**
+ * A filter which will actually insert spaces. Most filters/tokenizers remove them, but for testing it is
+ * sometimes better to insert them again ;-)
+ *
+ * @author Hardy Ferentschik
+ * @author Sanne Grinovero
+ */
+public class InsertWhitespaceFilter extends TokenFilter {
+
+ private TermAttribute termAtt;
+
+ public InsertWhitespaceFilter(TokenStream in) {
+ super( in );
+ termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+ }
+
+ @Override
+ public boolean incrementToken() throws IOException {
+ if ( input.incrementToken() ) {
+ String value = " " + termAtt.term() + " ";
+ termAtt.setTermBuffer( value );
+ return true;
+ }
+ else {
+ return false;
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
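Editorial note: the filter above only does something useful when chained behind a tokenizer. The following is a minimal sketch of such a chain (a hypothetical class named PaddingAnalyzer, not part of this change set; it reuses only classes that already appear in this commit). The "trim_analyzer" definition in Team.java further down relies on TrimFilterFactory to remove exactly the padding this chain produces.

    package org.hibernate.search.test.analyzer.solr;

    import java.io.Reader;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.standard.StandardTokenizer;

    import org.hibernate.search.test.SearchTestCase;

    // Chains InsertWhitespaceFilter behind a StandardTokenizer: every emitted
    // term comes back padded with a leading and a trailing space.
    public class PaddingAnalyzer extends Analyzer {

        public TokenStream tokenStream(String fieldName, Reader reader) {
            TokenStream source = new StandardTokenizer( SearchTestCase.getTargetLuceneVersion(), reader );
            return new InsertWhitespaceFilter( source );
        }
    }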
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilterFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilterFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilterFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.solr;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.solr.analysis.BaseTokenFilterFactory;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class InsertWhitespaceFilterFactory extends BaseTokenFilterFactory {
+ public InsertWhitespaceFilter create(TokenStream input) {
+ return new InsertWhitespaceFilter( input );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/InsertWhitespaceFilterFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/SolrAnalyzerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/SolrAnalyzerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/SolrAnalyzerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,185 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.solr;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.util.AnalyzerUtils;
+
+/**
+ * Tests the Solr analyzer creation framework.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class SolrAnalyzerTest extends SearchTestCase {
+
+ /**
+ * Tests that the token filters applied to <code>Team</code> are successfully created and used. Refer to
+ * <code>Team</code> to see the exact definitions.
+ *
+ * @throws Exception in case the test fails
+ */
+ public void testAnalyzerDef() throws Exception {
+ // create the test instance
+ Team team = new Team();
+ team.setDescription( "This is a D\u00E0scription" ); // \u00E0 == à - ISOLatin1AccentFilterFactory should strip off the diacritic
+ team.setLocation( "Atlanta" );
+ team.setName( "ATL team" );
+
+ // persist and index the test object
+ FullTextSession fts = Search.getFullTextSession( openSession() );
+ Transaction tx = fts.beginTransaction();
+ fts.persist( team );
+ tx.commit();
+ fts.clear();
+
+ // execute several searches to show that the right tokenizers were applied
+ tx = fts.beginTransaction();
+ TermQuery query = new TermQuery( new Term( "description", "D\u00E0scription" ) );
+ assertEquals(
+ "iso latin filter should work. à should be 'a' now", 0, fts.createFullTextQuery( query ).list().size()
+ );
+
+ query = new TermQuery( new Term( "description", "is" ) );
+ assertEquals(
+ "stop word filter should work. is should be removed", 0, fts.createFullTextQuery( query ).list().size()
+ );
+
+ query = new TermQuery( new Term( "description", "dascript" ) );
+ assertEquals(
+ "snowball stemmer should work. 'dascription' should be stemmed to 'dascript'",
+ 1,
+ fts.createFullTextQuery( query ).list().size()
+ );
+
+ // cleanup
+ fts.delete( fts.createFullTextQuery( query ).list().get( 0 ) );
+ tx.commit();
+ fts.close();
+ }
+
+ /**
+ * Tests the analyzers defined on {@link Team}.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testAnalyzers() throws Exception {
+ FullTextSession fts = Search.getFullTextSession( openSession() );
+
+ Analyzer analyzer = fts.getSearchFactory().getAnalyzer( "standard_analyzer" );
+ String text = "This is just FOOBAR's";
+ Token[] tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "This", "is", "just", "FOOBAR" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "html_standard_analyzer" );
+ text = "This is <b>foo</b><i>bar's</i>";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "This", "is", "foo", "bar" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "html_whitespace_analyzer" );
+ text = "This is <b>foo</b><i>bar's</i>";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "This", "is", "foo", "bar's" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "trim_analyzer" );
+ text = " Kittens! ";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "kittens" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "length_analyzer" );
+ text = "ab abc abcd abcde abcdef";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "abc", "abcd", "abcde" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "length_analyzer" );
+ text = "ab abc abcd abcde abcdef";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "abc", "abcd", "abcde" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "porter_analyzer" );
+ text = "bikes bikes biking";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "bike", "bike", "bike" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "word_analyzer" );
+ text = "CamelCase";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "Camel", "Case" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "synonym_analyzer" );
+ text = "ipod cosmos";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "ipod", "i-pod", "universe", "cosmos" } );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "shingle_analyzer" );
+ text = "please divide this sentence into shingles";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual(
+ tokens,
+ new String[] {
+ "please",
+ "please divide",
+ "divide",
+ "divide this",
+ "this",
+ "this sentence",
+ "sentence",
+ "sentence into",
+ "into",
+ "into shingles",
+ "shingles"
+ }
+ );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "phonetic_analyzer" );
+ text = "The quick brown fox jumped over the lazy dogs";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.displayTokens( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual(
+ tokens, new String[] { "0", "KK", "BRN", "FKS", "JMPT", "OFR", "0", "LS", "TKS" }
+ );
+
+ analyzer = fts.getSearchFactory().getAnalyzer( "pattern_analyzer" );
+ text = "foo,bar";
+ tokens = AnalyzerUtils.tokensFromAnalysis( analyzer, "name", text );
+ AnalyzerUtils.assertTokensEqual( tokens, new String[] { "foo", "bar" } );
+
+ fts.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Team.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/SolrAnalyzerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/Team.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/Team.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/Team.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,204 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.analyzer.solr;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.apache.solr.analysis.HTMLStripStandardTokenizerFactory;
+import org.apache.solr.analysis.HTMLStripWhitespaceTokenizerFactory;
+import org.apache.solr.analysis.ISOLatin1AccentFilterFactory;
+import org.apache.solr.analysis.LengthFilterFactory;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.LowerCaseTokenizerFactory;
+import org.apache.solr.analysis.PorterStemFilterFactory;
+import org.apache.solr.analysis.ShingleFilterFactory;
+import org.apache.solr.analysis.SnowballPorterFilterFactory;
+import org.apache.solr.analysis.StandardFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+import org.apache.solr.analysis.StopFilterFactory;
+import org.apache.solr.analysis.SynonymFilterFactory;
+import org.apache.solr.analysis.TrimFilterFactory;
+import org.apache.solr.analysis.WordDelimiterFilterFactory;
+import org.apache.solr.analysis.PhoneticFilterFactory;
+import org.apache.solr.analysis.PatternTokenizerFactory;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "customanalyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = ISOLatin1AccentFilterFactory.class),
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = StopFilterFactory.class, params = {
+ @Parameter(name = "words",
+ value = "org/hibernate/search/test/analyzer/solr/stoplist.properties"),
+ @Parameter(name = "ignoreCase", value = "true")
+ }),
+ @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {
+ @Parameter(name = "language", value = "English")
+ })
+ }),
+
+ @AnalyzerDef(name = "pattern_analyzer",
+ tokenizer = @TokenizerDef(factory = PatternTokenizerFactory.class, params = {
+ @Parameter(name = "pattern", value = ",")
+ })),
+
+ @AnalyzerDef(name = "standard_analyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = StandardFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "html_standard_analyzer",
+ tokenizer = @TokenizerDef(factory = HTMLStripStandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = StandardFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "html_whitespace_analyzer",
+ tokenizer = @TokenizerDef(factory = HTMLStripWhitespaceTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = StandardFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "trim_analyzer",
+ tokenizer = @TokenizerDef(factory = LowerCaseTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = InsertWhitespaceFilterFactory.class),
+ @TokenFilterDef(factory = TrimFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "length_analyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LengthFilterFactory.class, params = {
+ @Parameter(name = "min", value = "3"),
+ @Parameter(name = "max", value = "5")
+ })
+ }),
+
+ @AnalyzerDef(name = "porter_analyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = PorterStemFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "word_analyzer",
+ tokenizer = @TokenizerDef(factory = HTMLStripStandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = WordDelimiterFilterFactory.class, params = {
+ @Parameter(name = "splitOnCaseChange", value = "1")
+ })
+ }),
+
+ @AnalyzerDef(name = "synonym_analyzer",
+ tokenizer = @TokenizerDef(factory = HTMLStripStandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = SynonymFilterFactory.class, params = {
+ @Parameter(name = "synonyms",
+ value = "org/hibernate/search/test/analyzer/solr/synonyms.properties")
+ })
+ }),
+
+ @AnalyzerDef(name = "shingle_analyzer",
+ tokenizer = @TokenizerDef(factory = HTMLStripStandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = ShingleFilterFactory.class)
+ }),
+
+ @AnalyzerDef(name = "phonetic_analyzer",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = PhoneticFilterFactory.class, params = {
+ @Parameter(name = "encoder", value = "Metaphone"),
+ @Parameter(name = "inject", value = "true")
+ })
+ })
+})
+public class Team {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String name;
+
+ @Field
+ private String location;
+
+ @Field
+ @Analyzer(definition = "customanalyzer")
+ private String description;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public void setLocation(String location) {
+ this.location = location;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/analyzer/solr/Team.java
___________________________________________________________________
Name: svn:keywords
+ Id
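Editorial note: as a usage sketch for the analyzer definitions above (not part of this commit; it only combines API calls already used by AnalyzerInheritanceTest and SolrAnalyzerTest in this change set), a named definition such as "customanalyzer" can be retrieved from the SearchFactory and handed to Lucene's QueryParser, so query terms get the same accent folding, lower-casing and stemming as the indexed description field.

    // Inside a test method of a class extending SearchTestCase (declared throws Exception),
    // with the same imports as the tests above:
    FullTextSession fts = Search.getFullTextSession( openSession() );
    // look up the analyzer declared above via @AnalyzerDef(name = "customanalyzer")
    Analyzer descriptionAnalyzer = fts.getSearchFactory().getAnalyzer( "customanalyzer" );
    QueryParser parser = new QueryParser( getTargetLuceneVersion(), "description", descriptionAnalyzer );
    // "D\u00E0scription" is accent-folded, lower-cased and stemmed to "dascript",
    // the same token that was written to the index for the description field
    org.apache.lucene.search.Query luceneQuery = parser.parse( "description:D\u00E0scription" );
    java.util.List results = fts.createFullTextQuery( luceneQuery, Team.class ).list();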
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/AncientBook.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/AncientBook.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/AncientBook.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,45 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+
+@Entity
+@Indexed
+public class AncientBook extends Book {
+
+ public String catalogueGroupName = "";
+
+ public String getCatalogueGroupName() {
+ return catalogueGroupName;
+ }
+
+ public void setCatalogueGroupName(String catalogueGroupName) {
+ this.catalogueGroupName = catalogueGroupName;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/AncientBook.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Book.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Book.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Book.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+
+@Indexed
+@Entity
+public class Book implements TitleAble {
+
+ private long id;
+
+ private String title;
+
+ @Id @GeneratedValue
+ public long getId() {
+ return id;
+ }
+
+ public void setId(long id) {
+ this.id = id;
+ }
+
+ @Field
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Book.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Dvd.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Dvd.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Dvd.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+
+@Indexed
+@Entity
+public class Dvd implements TitleAble {
+
+ public long unusuallyNamedIdentifier;
+ public String title;
+
+ @Id
+ @GeneratedValue
+ public long getUnusuallyNamedIdentifier() {
+ return unusuallyNamedIdentifier;
+ }
+
+ public void setUnusuallyNamedIdentifier(long unusuallyNamedIdentifier) {
+ this.unusuallyNamedIdentifier = unusuallyNamedIdentifier;
+ }
+
+ @Field
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/Dvd.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/IndexingGeneratedCorpusTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/IndexingGeneratedCorpusTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/IndexingGeneratedCorpusTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,175 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import junit.framework.TestCase;
+
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.backend.impl.batchlucene.LuceneBatchBackend;
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+import org.hibernate.search.test.util.textbuilder.SentenceInventor;
+
+/**
+ * Tests the fullTextSession.createIndexer() API
+ * for basic functionality.
+ *
+ * @author Sanne Grinovero
+ */
+public class IndexingGeneratedCorpusTest extends TestCase {
+
+ private final int BOOK_NUM = 300;
+ private final int ANCIENTBOOK_NUM = 60;
+ private final int DVD_NUM = 200;
+
+ private SentenceInventor sentenceInventor = new SentenceInventor( 7L, 10000 );
+ private FullTextSessionBuilder builder;
+ private int totalEntitiesInDB = 0;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ builder = new FullTextSessionBuilder();
+ builder
+ .addAnnotatedClass( Book.class )
+ .addAnnotatedClass( Dvd.class )
+ .addAnnotatedClass( AncientBook.class )
+ .setProperty( "hibernate.show_sql", "false" ) //too verbose for this test
+ .setProperty( LuceneBatchBackend.CONCURRENT_WRITERS, "4" )
+ .build();
+ createMany( Book.class, BOOK_NUM );
+ createMany( Dvd.class, DVD_NUM );
+ createMany( AncientBook.class, ANCIENTBOOK_NUM );
+ }
+
+ @Override
+ protected void tearDown() {
+ builder.close();
+ }
+
+ private void createMany(Class<? extends TitleAble> entityType, int amount ) throws InstantiationException, IllegalAccessException {
+ FullTextSession fullTextSession = builder.openFullTextSession();
+ try {
+ Transaction tx = fullTextSession.beginTransaction();
+ for ( int i = 0; i < amount; i++ ) {
+ TitleAble instance = entityType.newInstance();
+ instance.setTitle( sentenceInventor.nextSentence() );
+ fullTextSession.persist( instance );
+ totalEntitiesInDB++;
+ if ( i % 250 == 249 ) {
+ tx.commit();
+ fullTextSession.clear();
+ System.out.println( "Test preparation: " + totalEntitiesInDB + " entities persisted" );
+ tx = fullTextSession.beginTransaction();
+ }
+ }
+ tx.commit();
+ }
+ finally {
+ fullTextSession.close();
+ }
+ }
+
+ public void testBatchIndexing() throws InterruptedException {
+ verifyResultNumbers(); //initial count of entities should match expectations
+ purgeAll(); // empty indexes
+ verifyIsEmpty();
+ reindexAll(); // rebuild the indexes
+ verifyResultNumbers(); // verify the count match again
+ reindexAll(); //tests that purgeAll is automatic:
+ verifyResultNumbers(); //..same numbers again
+ }
+
+ private void reindexAll() throws InterruptedException {
+ FullTextSession fullTextSession = builder.openFullTextSession();
+ try {
+ fullTextSession.createIndexer( Object.class )
+ .threadsForSubsequentFetching( 8 )
+ .threadsToLoadObjects( 4 )
+ .batchSizeToLoadObjects( 30 )
+ .startAndWait();
+ }
+ finally {
+ fullTextSession.close();
+ }
+ }
+
+ private void purgeAll() {
+ FullTextSession fullTextSession = builder.openFullTextSession();
+ try {
+ Transaction tx = fullTextSession.beginTransaction();
+ fullTextSession.purgeAll( Object.class );
+ tx.commit();
+ }
+ finally {
+ fullTextSession.close();
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void verifyResultNumbers() {
+ assertEquals( DVD_NUM,
+ countByFT( Dvd.class ) );
+ assertEquals( ANCIENTBOOK_NUM + BOOK_NUM,
+ countByFT( Book.class ) );
+ assertEquals( ANCIENTBOOK_NUM,
+ countByFT( AncientBook.class ) );
+ assertEquals( DVD_NUM + ANCIENTBOOK_NUM + BOOK_NUM,
+ countByFT( AncientBook.class, Book.class, Dvd.class ) );
+ assertEquals( DVD_NUM + ANCIENTBOOK_NUM,
+ countByFT( AncientBook.class, Dvd.class ) );
+ }
+
+ @SuppressWarnings("unchecked")
+ private void verifyIsEmpty() {
+ assertEquals( 0, countByFT( Dvd.class ) );
+ assertEquals( 0, countByFT( Book.class ) );
+ assertEquals( 0, countByFT( AncientBook.class ) );
+ assertEquals( 0, countByFT( AncientBook.class, Book.class, Dvd.class ) );
+ }
+
+ private int countByFT(Class<? extends TitleAble>... types) {
+ Query findAll = new MatchAllDocsQuery();
+ int bySize = 0;
+ int byResultSize = 0;
+ FullTextSession fullTextSession = builder.openFullTextSession();
+ try {
+ Transaction tx = fullTextSession.beginTransaction();
+ FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery( findAll, types );
+ bySize = fullTextQuery.list().size();
+ byResultSize = fullTextQuery.getResultSize();
+ tx.commit();
+ }
+ finally {
+ fullTextSession.close();
+ }
+ assertEquals( bySize, byResultSize );
+ return bySize;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/IndexingGeneratedCorpusTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
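Editorial note: for quick reference, the smallest form of the API exercised above looks as follows (a sketch, not additional test code; the tuning calls used in reindexAll() are optional and defaults apply when they are omitted).

    // Inside a test method declaring throws InterruptedException, with the
    // FullTextSessionBuilder set up as in the test above:
    FullTextSession fullTextSession = builder.openFullTextSession();
    try {
        // rebuild the index for the Book hierarchy, blocking until the indexer is done
        fullTextSession.createIndexer( Book.class ).startAndWait();
    }
    finally {
        fullTextSession.close();
    }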
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/ModernBook.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/ModernBook.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/ModernBook.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+
+@Entity
+@Indexed
+public class ModernBook extends Book {
+
+ public String isbn = null;
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/ModernBook.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/SearchIndexerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/SearchIndexerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/SearchIndexerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,150 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+import java.util.Set;
+
+import junit.framework.TestCase;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.impl.MassIndexerImpl;
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+public class SearchIndexerTest extends TestCase {
+
+ /**
+ * Test that the MassIndexer properly identifies the root entities
+ * from the selection of classes to be indexed.
+ */
+ public void testEntityHierarchy() {
+ FullTextSessionBuilder ftsb = new FullTextSessionBuilder()
+ .addAnnotatedClass( ModernBook.class )
+ .addAnnotatedClass( AncientBook.class )
+ .addAnnotatedClass( Dvd.class )
+ .addAnnotatedClass( Book.class )
+ .build();
+ FullTextSession fullTextSession = ftsb.openFullTextSession();
+ SearchFactoryImplementor searchFactory = (SearchFactoryImplementor) fullTextSession.getSearchFactory();
+ {
+ TestableMassIndexerImpl tsii = new TestableMassIndexerImpl( searchFactory, Book.class );
+ assertTrue( tsii.getRootEntities().contains( Book.class ) );
+ assertFalse( tsii.getRootEntities().contains( ModernBook.class ) );
+ assertFalse( tsii.getRootEntities().contains( AncientBook.class ) );
+ }
+ {
+ TestableMassIndexerImpl tsii = new TestableMassIndexerImpl( searchFactory, ModernBook.class, AncientBook.class, Book.class );
+ assertTrue( tsii.getRootEntities().contains( Book.class ) );
+ assertFalse( tsii.getRootEntities().contains( ModernBook.class ) );
+ assertFalse( tsii.getRootEntities().contains( AncientBook.class ) );
+ }
+ {
+ TestableMassIndexerImpl tsii = new TestableMassIndexerImpl( searchFactory, ModernBook.class, AncientBook.class );
+ assertFalse( tsii.getRootEntities().contains( Book.class ) );
+ assertTrue( tsii.getRootEntities().contains( ModernBook.class ) );
+ assertTrue( tsii.getRootEntities().contains( AncientBook.class ) );
+ }
+ //verify that indexing Object will result in one separate indexer working per root indexed entity
+ {
+ TestableMassIndexerImpl tsii = new TestableMassIndexerImpl( searchFactory, Object.class );
+ assertTrue( tsii.getRootEntities().contains( Book.class ) );
+ assertTrue( tsii.getRootEntities().contains( Dvd.class ) );
+ assertFalse( tsii.getRootEntities().contains( AncientBook.class ) );
+ assertFalse( tsii.getRootEntities().contains( Object.class ) );
+ assertEquals( 2, tsii.getRootEntities().size() );
+ }
+ }
+
+ private static class TestableMassIndexerImpl extends MassIndexerImpl {
+
+ protected TestableMassIndexerImpl(SearchFactoryImplementor searchFactory, Class<?>... types) {
+ super( searchFactory, null, types );
+ }
+
+ public Set<Class<?>> getRootEntities() {
+ return this.rootEntities;
+ }
+
+ }
+
+ /**
+ * Test to verify that the identifier loading works even when
+ * the property is not called "id"
+ */
+ public void testIdentifierNaming() throws InterruptedException {
+ //disable automatic indexing, to test manual index creation.
+ FullTextSessionBuilder ftsb = new FullTextSessionBuilder()
+ .setProperty( org.hibernate.search.Environment.ANALYZER_CLASS, StandardAnalyzer.class.getName() )
+ .addAnnotatedClass( Dvd.class )
+ .setProperty( Environment.INDEXING_STRATEGY, "manual" )
+ .build();
+ {
+ //creating the test data in database only:
+ FullTextSession fullTextSession = ftsb.openFullTextSession();
+ Transaction transaction = fullTextSession.beginTransaction();
+ Dvd dvda = new Dvd();
+ dvda.setTitle( "Star Trek (episode 96367)" );
+ fullTextSession.save(dvda);
+ Dvd dvdb = new Dvd();
+ dvdb.setTitle( "The Trek" );
+ fullTextSession.save(dvdb);
+ transaction.commit();
+ fullTextSession.close();
+ }
+ {
+ //verify index is still empty:
+ assertEquals( 0, countResults( new Term( "title", "trek" ), ftsb, Dvd.class ) );
+ }
+ {
+ FullTextSession fullTextSession = ftsb.openFullTextSession();
+ fullTextSession.createIndexer( Dvd.class )
+ .startAndWait();
+ fullTextSession.close();
+ }
+ {
+ //verify index is now containing both DVDs:
+ assertEquals( 2, countResults( new Term( "title", "trek" ), ftsb, Dvd.class ) );
+ }
+ }
+
+ //helper method
+ private int countResults( Term termForQuery, FullTextSessionBuilder ftSessionBuilder, Class<?> type ) {
+ TermQuery fullTextQuery = new TermQuery( termForQuery );
+ FullTextSession fullTextSession = ftSessionBuilder.openFullTextSession();
+ Transaction transaction = fullTextSession.beginTransaction();
+ FullTextQuery query = fullTextSession.createFullTextQuery( fullTextQuery, type );
+ int resultSize = query.getResultSize();
+ transaction.commit();
+ fullTextSession.close();
+ return resultSize;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/SearchIndexerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/TitleAble.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/TitleAble.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/TitleAble.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,33 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.batchindexing;
+
+public interface TitleAble {
+
+ public String getTitle();
+
+ public void setTitle(String title);
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/batchindexing/TitleAble.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/BridgeTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/BridgeTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/BridgeTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,282 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.net.URI;
+import java.net.URL;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.bridge.builtin.CalendarBridge;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BridgeTest extends SearchTestCase {
+ public void testDefaultAndNullBridges() throws Exception {
+ Cloud cloud = new Cloud();
+ cloud.setMyDate( null );
+ cloud.setDouble1( null );
+ cloud.setDouble2( 2.1d );
+ cloud.setIntegerv1( null );
+ cloud.setIntegerv2( 2 );
+ cloud.setFloat1( null );
+ cloud.setFloat2( 2.1f );
+ cloud.setLong1( null );
+ cloud.setLong2( 2l );
+ cloud.setString( null );
+ cloud.setType( CloudType.DOG );
+ cloud.setChar1( null );
+ cloud.setChar2( 'P' );
+ cloud.setStorm( false );
+ cloud.setClazz( Cloud.class );
+ cloud.setUri( new URI( "http://www.hibernate.org" ) );
+ cloud.setUrl( new URL( "http://www.hibernate.org" ) );
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( cloud );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List result;
+
+ query = parser.parse(
+ "double2:[2.1 TO 2.1] AND float2:[2.1 TO 2.1] " +
+ "AND integerv2:[2 TO 2.1] AND long2:[2 TO 2.1] AND type:\"dog\" AND storm:false"
+ );
+
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "find primitives and do not fail on null", 1, result.size() );
+
+ query = parser.parse( "double1:[2.1 TO 2.1] OR float1:[2.1 TO 2.1] OR integerv1:[2 TO 2.1] OR long1:[2 TO 2.1]" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "null elements should not be stored", 0, result.size() ); //the query is dumb because restrictive
+
+ query = parser.parse( "type:dog" );
+ result = session.createFullTextQuery( query ).setProjection( "type" ).list();
+ assertEquals( "Enum projection works", 1, result.size() ); //the query is dumb because restrictive
+
+ query = new TermQuery( new Term( "clazz", Cloud.class.getName() ) );
+ result = session.createFullTextQuery( query ).setProjection( "clazz" ).list();
+ assertEquals( "Clazz projection works", 1, result.size() );
+ assertEquals(
+ "Clazz projection works",
+ Cloud.class.getName(),
+ ( ( Class ) ( ( Object[] ) result.get( 0 ) )[0] ).getName()
+ );
+
+ BooleanQuery bQuery = new BooleanQuery();
+ bQuery.add( new TermQuery( new Term( "uri", "http://www.hibernate.org" ) ), BooleanClause.Occur.MUST );
+ bQuery.add( new TermQuery( new Term( "url", "http://www.hibernate.org" ) ), BooleanClause.Occur.MUST );
+
+ result = session.createFullTextQuery( bQuery ).setProjection( "clazz" ).list();
+ assertEquals( "Clazz projection works", 1, result.size() );
+
+ query = parser.parse( "char1:[" + String.valueOf( Character.MIN_VALUE ) + " TO " + String.valueOf( Character.MAX_VALUE ) + "]" );
+ result = session.createFullTextQuery( query ).setProjection( "char1" ).list();
+ assertEquals( "Null elements should not be stored, CharacterBridge is not working", 0, result.size() );
+
+ query = parser.parse( "char2:P" );
+ result = session.createFullTextQuery( query ).setProjection( "char2" ).list();
+ assertEquals( "Wrong results number, CharacterBridge is not working", 1, result.size() );
+ assertEquals( "Wrong result, CharacterBridge is not working", 'P', ( ( Object[] ) result.get( 0 ) )[0] );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testCustomBridges() throws Exception {
+ Cloud cloud = new Cloud();
+ cloud.setCustomFieldBridge( "This is divided by 2" );
+ cloud.setCustomStringBridge( "This is div by 4" );
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( cloud );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.simpleAnalyzer );
+ Query query;
+ List result;
+
+ query = parser.parse( "customFieldBridge:This AND customStringBridge:This" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "Properties not mapped", 1, result.size() );
+
+ query = parser.parse( "customFieldBridge:by AND customStringBridge:is" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "Custom types not taken into account", 0, result.size() );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testDateBridge() throws Exception {
+ Cloud cloud = new Cloud();
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set( 2000, 11, 15, 3, 43, 2 );
+ c.set( Calendar.MILLISECOND, 5 );
+
+ Date date = new Date( c.getTimeInMillis() );
+ cloud.setMyDate( date ); //5 millisecond
+ cloud.setDateDay( date );
+ cloud.setDateHour( date );
+ cloud.setDateMillisecond( date );
+ cloud.setDateMinute( date );
+ cloud.setDateMonth( date );
+ cloud.setDateSecond( date );
+ cloud.setDateYear( date );
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( cloud );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List result;
+
+ query = parser.parse(
+ "myDate:[19900101 TO 20060101]"
+ + " AND dateDay:[20001214 TO 2000121501]"
+ + " AND dateMonth:[200012 TO 20001201]"
+ + " AND dateYear:[2000 TO 200001]"
+ + " AND dateHour:[20001214 TO 2000121503]"
+ + " AND dateMinute:[20001214 TO 200012150343]"
+ + " AND dateSecond:[20001214 TO 20001215034302]"
+ + " AND dateMillisecond:[20001214 TO 20001215034302005]"
+ );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "Date not found or not property truncated", 1, result.size() );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ }
+
+
+ public void testCalendarBridge() throws Exception {
+ Cloud cloud = new Cloud();
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set( 2000, 11, 15, 3, 43, 2 );
+ c.set( Calendar.MILLISECOND, 5 );
+
+
+ cloud.setMyCalendar(c); //5 millisecond
+ cloud.setCalendarDay(c);
+ cloud.setCalendarHour( c );
+ cloud.setCalendarMillisecond( c );
+ cloud.setCalendarMinute( c );
+ cloud.setCalendarMonth( c );
+ cloud.setCalendarSecond( c );
+ cloud.setCalendarYear( c );
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( cloud );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List result;
+
+ query = parser.parse(
+ "myCalendar:[19900101 TO 20060101]"
+ + " AND calendarDay:[20001214 TO 2000121501]"
+ + " AND calendarMonth:[200012 TO 20001201]"
+ + " AND calendarYear:[2000 TO 200001]"
+ + " AND calendarHour:[20001214 TO 2000121503]"
+ + " AND calendarMinute:[20001214 TO 200012150343]"
+ + " AND calendarSecond:[20001214 TO 20001215034302]"
+ + " AND calendarMillisecond:[20001214 TO 20001215034302005]"
+ );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "Calendar not found or not property truncated", 1, result.size() );
+
+ s.delete( s.get( Cloud.class, cloud.getId() ) );
+ tx.commit();
+ s.close();
+
+ //now unit-test the bridge directly:
+
+ CalendarBridge bridge = new CalendarBridge();
+ HashMap<String, String> bridgeParams = new HashMap<String, String>();
+ bridgeParams.put( CalendarBridge.RESOLUTION_PARAMETER, Resolution.YEAR.toString() );
+ bridge.setParameterValues( bridgeParams );
+ assertEquals( "2000", bridge.objectToString( c ) );
+ bridgeParams.put( CalendarBridge.RESOLUTION_PARAMETER, Resolution.DAY.toString() );
+ bridge.setParameterValues( bridgeParams );
+ assertEquals( "20001215", bridge.objectToString( c ) );
+
+ }
+
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Cloud.class
+ };
+ }
+
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.ANALYZER_CLASS, SimpleAnalyzer.class.getName() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/BridgeTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatDeptsFieldsClassBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatDeptsFieldsClassBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatDeptsFieldsClassBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author John Griffin
+ */
+public class CatDeptsFieldsClassBridge implements FieldBridge, ParameterizedBridge {
+
+ private String sepChar;
+
+ @SuppressWarnings("unchecked")
+ public void setParameterValues(Map parameters) {
+ this.sepChar = (String) parameters.get( "sepChar" );
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Departments dep = (Departments) value;
+ String fieldValue1 = dep.getBranch();
+ if ( fieldValue1 == null ) {
+ fieldValue1 = "";
+ }
+ String fieldValue2 = dep.getNetwork();
+ if ( fieldValue2 == null ) {
+ fieldValue2 = "";
+ }
+ String fieldValue = fieldValue1 + sepChar + fieldValue2;
+ Field field = new Field( name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatDeptsFieldsClassBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatFieldsClassBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatFieldsClassBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatFieldsClassBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author John Griffin
+ */
+public class CatFieldsClassBridge implements FieldBridge, ParameterizedBridge {
+
+ private String sepChar;
+
+ @SuppressWarnings("unchecked")
+ public void setParameterValues(Map parameters) {
+ this.sepChar = (String) parameters.get( "sepChar" );
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Department dep = (Department) value;
+ String fieldValue1 = dep.getBranch();
+ if ( fieldValue1 == null ) {
+ fieldValue1 = "";
+ }
+ String fieldValue2 = dep.getNetwork();
+ if ( fieldValue2 == null ) {
+ fieldValue2 = "";
+ }
+ String fieldValue = fieldValue1 + sepChar + fieldValue2;
+ Field field = new Field( name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CatFieldsClassBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeAndProjectionTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeAndProjectionTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeAndProjectionTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,74 @@
+package org.hibernate.search.test.bridge;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+import java.util.List;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ClassBridgeAndProjectionTest extends SearchTestCase {
+
+ public void testClassBridgeProjection() throws Exception {
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+
+ // create entities
+ Teacher teacher = new Teacher();
+ teacher.setName("John Smith");
+ s.persist(teacher);
+
+ Student student1 = new Student();
+ student1.setGrade("foo");
+ student1.setName("Jack Miller");
+ student1.setTeacher(teacher);
+ teacher.getStudents().add(student1);
+ s.persist(student1);
+
+ Student student2 = new Student();
+ student2.setGrade("bar");
+ student2.setName("Steve Marshall");
+ student2.setTeacher(teacher);
+ teacher.getStudents().add(student2);
+ s.persist(student2);
+
+ tx.commit();
+
+ // test query without projection
+ FullTextSession ftSession = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser(
+ getTargetLuceneVersion(),
+ "name",
+ standardAnalyzer );
+ FullTextQuery query = ftSession.createFullTextQuery(parser.parse("name:John"), Teacher.class);
+ List results = query.list();
+ assertNotNull(results);
+ assertTrue(results.size() == 1);
+ assertTrue(((Teacher) results.get(0)).getStudents().size() == 2);
+
+ // now test with projection
+ query.setProjection("amount_of_students");
+ results = query.list();
+ assertNotNull(results);
+ assertTrue(results.size() == 1);
+ Object[] firstResult = (Object[]) results.get(0);
+ Integer amountStudents = (Integer) firstResult[0];
+ assertEquals(new Integer(2), amountStudents);
+
+ s.close();
+ }
+
+ @Override
+ protected Class<?>[] getMappings() {
+ return new Class<?>[] {
+ Student.class,
+ Teacher.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeAndProjectionTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,335 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.List;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.document.Document;
+import org.hibernate.Transaction;
+import org.hibernate.ScrollableResults;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author John Griffin
+ */
+public class ClassBridgeTest extends SearchTestCase {
+ /**
+ * This tests that the field created by the user-supplied
+ * EquipmentType class bridge exists and translates a manufacturer
+ * identifier into a manufacturer name.
+ *
+ * @throws Exception
+ */
+ public void testClassBridges() throws Exception {
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( getDepts1() );
+ s.persist( getDepts2() );
+ s.persist( getDepts3() );
+ s.persist( getDepts4() );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+
+ // The equipment field is the manufacturer field in the
+ // Departments entity after being massaged by passing it
+ // through the EquipmentType class. This field is in
+ // the Lucene document but not in the Department entity itself.
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "equipment", SearchTestCase.simpleAnalyzer );
+
+ // Check the second ClassBridge annotation
+ Query query = parser.parse( "equiptype:Cisco" );
+ org.hibernate.search.FullTextQuery hibQuery = session.createFullTextQuery( query, Departments.class );
+ List<Departments> result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect number of results returned", 2, result.size() );
+ for (Departments d : result) {
+ assertEquals("incorrect manufacturer", "C", d.getManufacturer());
+ }
+
+ // No data cross-ups.
+ query = parser.parse( "branchnetwork:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "problem with field cross-ups", result.size() == 0 );
+
+ // Non-ClassBridge field.
+ parser = new QueryParser( getTargetLuceneVersion(), "branchHead", SearchTestCase.simpleAnalyzer );
+ query = parser.parse( "branchHead:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "incorrect entity returned, wrong branch head", result.size() == 1 );
+ assertEquals("incorrect entity returned", "Kent Lewin", ( result.get( 0 ) ).getBranchHead());
+
+ // Check other ClassBridge annotation.
+ parser = new QueryParser( getTargetLuceneVersion(), "branchnetwork", SearchTestCase.simpleAnalyzer );
+ query = parser.parse( "branchnetwork:st. george 1D" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect entity returned, wrong network", "1D", ( result.get( 0 ) ).getNetwork() );
+ assertEquals( "incorrect entity returned, wrong branch", "St. George", ( result.get( 0 ) ).getBranch() );
+ assertEquals( "incorrect number of results returned", 1, result.size() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Departments.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * This is the same test as above with a projection query
+ * to show the presence of the ClassBridge impl built fields
+ * just in case you don't believe us.
+ *
+ * @throws Exception
+ */
+ public void testClassBridgesWithProjection() throws Exception {
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( getDepts1() );
+ s.persist( getDepts2() );
+ s.persist( getDepts3() );
+ s.persist( getDepts4() );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+
+ // The equipment field is the manufacturer field in the
+ // Departments entity after being massaged by passing it
+ // through the EquipmentType class. This field is in
+ // the Lucene document but not in the Department entity itself.
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "equipment", SearchTestCase.simpleAnalyzer );
+
+ // Check the second ClassBridge annotation
+ Query query = parser.parse( "equiptype:Cisco" );
+ org.hibernate.search.FullTextQuery hibQuery = session.createFullTextQuery( query, Departments.class );
+
+ hibQuery.setProjection( FullTextQuery.THIS, FullTextQuery.DOCUMENT );
+
+ ScrollableResults projections = hibQuery.scroll();
+ assertNotNull( projections );
+
+ projections.beforeFirst();
+ projections.next();
+ Object[] projection = projections.get();
+
+ assertTrue( "DOCUMENT incorrect", projection[0] instanceof Departments );
+ assertEquals( "id incorrect", 1, ((Departments)projection[0]).getId() );
+ assertTrue( "DOCUMENT incorrect", projection[1] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 8, ( (Document) projection[1] ).getFields().size() );
+ assertNotNull( "equiptype is null", ( (Document) projection[1] ).getField("equiptype") );
+ assertEquals( "equiptype incorrect", "Cisco", ( (Document) projection[1] ).getField("equiptype" ).stringValue() );
+ assertNotNull( "branchnetwork is null", ( (Document) projection[1] ).getField("branchnetwork") );
+ assertEquals( "branchnetwork incorrect", "Salt Lake City 1A", ( (Document) projection[1] ).getField("branchnetwork" ).stringValue() );
+
+ projections.next();
+ projection = projections.get();
+
+ assertTrue( "DOCUMENT incorrect", projection[0] instanceof Departments );
+ assertEquals( "id incorrect", 4, ((Departments)projection[0]).getId() );
+ assertTrue( "DOCUMENT incorrect", projection[1] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 8, ( (Document) projection[1] ).getFields().size() );
+ assertNotNull( "equiptype is null", ( (Document) projection[1] ).getField("equiptype") );
+ assertEquals( "equiptype incorrect", "Cisco", ( (Document) projection[1] ).getField("equiptype" ).stringValue() );
+ assertNotNull( "branchnetwork is null", ( (Document) projection[1] ).getField("branchnetwork") );
+ assertEquals( "branchnetwork incorrect", "St. George 1D", ( (Document) projection[1] ).getField("branchnetwork" ).stringValue() );
+
+ assertTrue("incorrect result count returned", projections.isLast());
+ //cleanup
+ for (Object element : s.createQuery( "from " + Departments.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * This test checks that two fields are concatenated by the user-supplied
+ * CatFieldsClassBridge class, which is specified as the implementation class
+ * in the ClassBridge annotation of the Department class.
+ *
+ * @throws Exception
+ */
+ public void testClassBridge() throws Exception {
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( getDept1() );
+ s.persist( getDept2() );
+ s.persist( getDept3() );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+
+ // The branchnetwork field is the concatenation of both
+ // the branch field and the network field of the Department
+ // class. This is in the Lucene document but not in the
+ // Department entity itself.
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "branchnetwork", SearchTestCase.simpleAnalyzer );
+
+ Query query = parser.parse( "branchnetwork:layton 2B" );
+ org.hibernate.search.FullTextQuery hibQuery = session.createFullTextQuery( query, Department.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect entity returned, wrong network", "2B", ( (Department) result.get( 0 ) ).getNetwork() );
+ assertEquals( "incorrect entity returned, wrong branch", "Layton", ( (Department) result.get( 0 ) ).getBranch() );
+ assertEquals( "incorrect number of results returned", 1, result.size() );
+
+ // Partial match.
+ query = parser.parse( "branchnetwork:3c" );
+ hibQuery = session.createFullTextQuery( query, Department.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect entity returned, wrong network", "3C", ( (Department) result.get( 0 ) ).getNetwork() );
+ assertEquals( "incorrect entity returned, wrong branch", "West Valley", ( (Department) result.get( 0 ) ).getBranch() );
+ assertEquals( "incorrect number of results returned", 1, result.size() );
+
+ // No data cross-ups .
+ query = parser.parse( "branchnetwork:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Department.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "problem with field cross-ups", result.size() == 0 );
+
+ // Non-ClassBridge field.
+ parser = new QueryParser( getTargetLuceneVersion(), "branchHead", SearchTestCase.simpleAnalyzer );
+ query = parser.parse( "branchHead:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Department.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "incorrect entity returned, wrong branch head", result.size() == 1 );
+ assertEquals("incorrect entity returned", "Kent Lewin", ( (Department) result.get( 0 ) ).getBranchHead());
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Department.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ private Department getDept1() {
+ Department dept = new Department();
+
+ dept.setBranch( "Salt Lake City" );
+ dept.setBranchHead( "Kent Lewin" );
+ dept.setMaxEmployees( 100 );
+ dept.setNetwork( "1A" );
+ return dept;
+ }
+
+ private Department getDept2() {
+ Department dept = new Department();
+
+ dept.setBranch( "Layton" );
+ dept.setBranchHead( "Terry Poperszky" );
+ dept.setMaxEmployees( 20 );
+ dept.setNetwork( "2B" );
+
+ return dept;
+ }
+
+ private Department getDept3() {
+ Department dept = new Department();
+
+ dept.setBranch( "West Valley" );
+ dept.setBranchHead( "Pat Kelley" );
+ dept.setMaxEmployees( 15 );
+ dept.setNetwork( "3C" );
+
+ return dept;
+ }
+
+ private Departments getDepts1() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "Salt Lake City" );
+ depts.setBranchHead( "Kent Lewin" );
+ depts.setMaxEmployees( 100 );
+ depts.setNetwork( "1A" );
+ depts.setManufacturer( "C" );
+
+ return depts;
+ }
+
+ private Departments getDepts2() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "Layton" );
+ depts.setBranchHead( "Terry Poperszky" );
+ depts.setMaxEmployees( 20 );
+ depts.setNetwork( "2B" );
+ depts.setManufacturer( "3" );
+
+ return depts;
+ }
+
+ private Departments getDepts3() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "West Valley" );
+ depts.setBranchHead( "Pat Kelley" );
+ depts.setMaxEmployees( 15 );
+ depts.setNetwork( "3C" );
+ depts.setManufacturer( "D" );
+
+ return depts;
+ }
+
+ private Departments getDepts4() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "St. George" );
+ depts.setBranchHead( "Spencer Stajskal" );
+ depts.setMaxEmployees( 10 );
+ depts.setNetwork( "1D" );
+ depts.setManufacturer( "C" );
+ return depts;
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Department.class,
+ Departments.class
+ };
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.ANALYZER_CLASS, SimpleAnalyzer.class.getName() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/ClassBridgeTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Cloud.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Cloud.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Cloud.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,411 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Date;
+import java.util.Calendar;
+import java.net.URL;
+import java.net.URI;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.*;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Cloud {
+
+ private int id;
+ private Long long1;
+ private long long2;
+ private Integer integerv1;
+ private int integerv2;
+ private Double double1;
+ private double double2;
+ private Float float1;
+ private float float2;
+ private String string;
+ private Date myDate;
+ private Date dateYear;
+ private Date dateMonth;
+ private Date dateDay;
+ private Date dateHour;
+ private Date dateMinute;
+ private Date dateSecond;
+ private Date dateMillisecond;
+ private String customFieldBridge;
+ private String customStringBridge;
+ private Character char1;
+ private char char2;
+ private CloudType type;
+ private boolean storm;
+ private Class clazz;
+ private URL url;
+ private URI uri;
+ private Calendar myCalendar;
+ private Calendar calendarYear;
+ private Calendar calendarMonth;
+ private Calendar calendarDay;
+ private Calendar calendarMinute;
+ private Calendar calendarSecond;
+ private Calendar calendarHour;
+ private Calendar calendarMillisecond;
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public URL getUrl() {
+ return url;
+ }
+
+ public void setUrl(URL url) {
+ this.url = url;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public URI getUri() {
+ return uri;
+ }
+
+ public void setUri(URI uri) {
+ this.uri = uri;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Class getClazz() {
+ return clazz;
+ }
+
+ public void setClazz(Class clazz) {
+ this.clazz = clazz;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ @FieldBridge(impl = TruncateFieldBridge.class)
+ public String getCustomFieldBridge() {
+ return customFieldBridge;
+ }
+
+ public void setCustomFieldBridge(String customFieldBridge) {
+ this.customFieldBridge = customFieldBridge;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES,
+ bridge = @FieldBridge(impl = TruncateStringBridge.class, params = @Parameter( name="dividedBy", value="4" ) )
+ )
+ public String getCustomStringBridge() {
+ return customStringBridge;
+ }
+
+ public void setCustomStringBridge(String customStringBridge) {
+ this.customStringBridge = customStringBridge;
+ }
+
+ @Id @GeneratedValue @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Long getLong1() {
+ return long1;
+ }
+
+ public void setLong1(Long long1) {
+ this.long1 = long1;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public long getLong2() {
+ return long2;
+ }
+
+ public void setLong2(long long2) {
+ this.long2 = long2;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Integer getIntegerv1() {
+ return integerv1;
+ }
+
+ public void setIntegerv1(Integer integerv1) {
+ this.integerv1 = integerv1;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public int getIntegerv2() {
+ return integerv2;
+ }
+
+ public void setIntegerv2(int integerv2) {
+ this.integerv2 = integerv2;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Double getDouble1() {
+ return double1;
+ }
+
+ public void setDouble1(Double double1) {
+ this.double1 = double1;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public double getDouble2() {
+ return double2;
+ }
+
+ public void setDouble2(double double2) {
+ this.double2 = double2;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Float getFloat1() {
+ return float1;
+ }
+
+ public void setFloat1(Float float1) {
+ this.float1 = float1;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public float getFloat2() {
+ return float2;
+ }
+
+ public void setFloat2(float float2) {
+ this.float2 = float2;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getString() {
+ return string;
+ }
+
+ public void setString(String string) {
+ this.string = string;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Date getMyDate() {
+ return myDate;
+ }
+
+ public void setMyDate(Date myDate) {
+ this.myDate = myDate;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.YEAR )
+ public Date getDateYear() {
+ return dateYear;
+ }
+
+ public void setDateYear(Date dateYear) {
+ this.dateYear = dateYear;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.MONTH )
+ public Date getDateMonth() {
+ return dateMonth;
+ }
+
+ public void setDateMonth(Date dateMonth) {
+ this.dateMonth = dateMonth;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.DAY )
+ public Date getDateDay() {
+ return dateDay;
+ }
+
+ public void setDateDay(Date dateDay) {
+ this.dateDay = dateDay;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.HOUR )
+ public Date getDateHour() {
+ return dateHour;
+ }
+
+ public void setDateHour(Date dateHour) {
+ this.dateHour = dateHour;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.MINUTE )
+ public Date getDateMinute() {
+ return dateMinute;
+ }
+
+ public void setDateMinute(Date dateMinute) {
+ this.dateMinute = dateMinute;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.SECOND )
+ public Date getDateSecond() {
+ return dateSecond;
+ }
+
+ public void setDateSecond(Date dateSecond) {
+ this.dateSecond = dateSecond;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge( resolution = Resolution.MILLISECOND )
+ public Date getDateMillisecond() {
+ return dateMillisecond;
+ }
+
+ public void setDateMillisecond(Date dateMillisecond) {
+ this.dateMillisecond = dateMillisecond;
+ }
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public CloudType getType() {
+ return type;
+ }
+
+ public void setType(CloudType type) {
+ this.type = type;
+ }
+
+ @Field(index = Index.TOKENIZED )
+ public boolean isStorm() {
+ return storm;
+ }
+
+ public void setStorm(boolean storm) {
+ this.storm = storm;
+ }
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public Character getChar1() {
+ return char1;
+ }
+
+ public void setChar1(Character char1) {
+ this.char1 = char1;
+ }
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public char getChar2() {
+ return char2;
+ }
+
+ public void setChar2(char char2) {
+ this.char2 = char2;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Calendar getMyCalendar() {
+ return myCalendar;
+ }
+
+ public void setMyCalendar(Calendar myCalendar) {
+ this.myCalendar = myCalendar;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge(resolution = Resolution.YEAR )
+ public Calendar getCalendarYear() {
+ return calendarYear;
+ }
+
+ public void setCalendarYear(Calendar calendarYear) {
+ this.calendarYear = calendarYear;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.MONTH )
+ public Calendar getCalendarMonth() {
+ return calendarMonth;
+ }
+
+ public void setCalendarMonth(Calendar calendarMonth) {
+ this.calendarMonth = calendarMonth;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.DAY )
+ public Calendar getCalendarDay() {
+ return calendarDay;
+ }
+
+ public void setCalendarDay(Calendar calendarDay) {
+ this.calendarDay = calendarDay;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.MINUTE )
+ public Calendar getCalendarMinute() {
+ return calendarMinute;
+ }
+
+ public void setCalendarMinute(Calendar calendarMinute) {
+ this.calendarMinute = calendarMinute;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.HOUR )
+ public Calendar getCalendarHour() {
+ return calendarHour;
+ }
+
+ public void setCalendarHour(Calendar calendarHour) {
+ this.calendarHour = calendarHour;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.MILLISECOND )
+ public Calendar getCalendarMillisecond() {
+ return calendarMillisecond;
+ }
+
+ public void setCalendarMillisecond(Calendar calendarMillisecond) {
+ this.calendarMillisecond = calendarMillisecond;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @CalendarBridge( resolution = Resolution.SECOND )
+ public Calendar getCalendarSecond() {
+ return calendarSecond;
+ }
+
+ public void setCalendarSecond(Calendar calendarSecond) {
+ this.calendarSecond = calendarSecond;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Cloud.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CloudType.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CloudType.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CloudType.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,35 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public enum CloudType {
+ DRAGON,
+ HOUSE,
+ DOG,
+ EUROPE
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/CloudType.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/DateSplitBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/DateSplitBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/DateSplitBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+
+/**
+ * Store the date in 3 different fields - year, month, day - to ease Range Query per
+ * year, month or day (eg get all the elements of December for the last 5 years).
+ *
+ * @author Emmanuel Bernard
+ */
+public class DateSplitBridge implements FieldBridge {
+ private final static TimeZone GMT = TimeZone.getTimeZone("GMT");
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ Date date = (Date) value;
+ Calendar cal = GregorianCalendar.getInstance(GMT);
+ cal.setTime(date);
+ int year = cal.get(Calendar.YEAR);
+ int month = cal.get(Calendar.MONTH) + 1;
+ int day = cal.get(Calendar.DAY_OF_MONTH);
+
+ // set year
+ Field field = new Field(name + ".year", String.valueOf(year),
+ luceneOptions.getStore(), luceneOptions.getIndex(),
+ luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+
+ // set month and pad it if needed
+ field = new Field(name + ".month", month < 10 ? "0" : ""
+ + String.valueOf(month), luceneOptions.getStore(),
+ luceneOptions.getIndex(), luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+
+ // set day and pad it if needed
+ field = new Field(name + ".day", day < 10 ? "0" : ""
+ + String.valueOf(day), luceneOptions.getStore(),
+ luceneOptions.getIndex(), luceneOptions.getTermVector());
+ field.setBoost(luceneOptions.getBoost());
+ document.add(field);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/DateSplitBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
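For context, a minimal usage sketch for the DateSplitBridge added above (the "creationDate" property name and the query are illustrative assumptions, not part of this commit; imports and the rest of the entity are omitted):

    // inside an @Indexed entity, map a Date property through the bridge;
    // the bridge emits creationDate.year, creationDate.month and creationDate.day
    @Field(index = Index.UN_TOKENIZED, store = Store.YES)
    @FieldBridge(impl = DateSplitBridge.class)
    public Date getCreationDate() { return creationDate; }

    // "all December entries" then becomes a term query on the generated month sub-field
    Query decemberOnly = new TermQuery( new Term( "creationDate.month", "12" ) );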
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Department.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Department.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Department.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,102 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+@ClassBridge(name="branchnetwork",
+ index=Index.TOKENIZED,
+ store=Store.YES,
+ impl = CatFieldsClassBridge.class,
+ params = @Parameter( name="sepChar", value=" " ) )
+public class Department {
+ private int id;
+ private String network;
+ private String branchHead;
+ private String branch;
+ private Integer maxEmployees;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getBranchHead() {
+ return branchHead;
+ }
+
+ public void setBranchHead(String branchHead) {
+ this.branchHead = branchHead;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getNetwork() {
+ return network;
+ }
+
+ public void setNetwork(String network) {
+ this.network = network;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getBranch() {
+ return branch;
+ }
+
+ public void setBranch(String branch) {
+ this.branch = branch;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Integer getMaxEmployees() {
+ return maxEmployees;
+ }
+
+ public void setMaxEmployees(Integer maxEmployees) {
+ this.maxEmployees = maxEmployees;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Department.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Departments.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Departments.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Departments.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,126 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.ClassBridge;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Parameter;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.ClassBridges;
+
+/**
+ * This is just a simple copy of the Department entity to allow
+ * separation of the tests for ClassBridge and ClassBridges.
+ *
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+@ClassBridges ( {
+ @ClassBridge(name="branchnetwork",
+ index= Index.TOKENIZED,
+ store= Store.YES,
+ impl = CatDeptsFieldsClassBridge.class,
+ params = @Parameter( name="sepChar", value=" " ) ),
+ @ClassBridge(name="equiptype",
+ index= Index.TOKENIZED,
+ store= Store.YES,
+ impl = EquipmentType.class,
+ params = {@Parameter( name="C", value="Cisco" ),
+ @Parameter( name="D", value="D-Link" ),
+ @Parameter( name="K", value="Kingston" ),
+ @Parameter( name="3", value="3Com" )
+ })
+})
+public class Departments {
+ private int id;
+ private String network;
+ private String manufacturer;
+ private String branchHead;
+ private String branch;
+ private Integer maxEmployees;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getBranchHead() {
+ return branchHead;
+ }
+
+ public void setBranchHead(String branchHead) {
+ this.branchHead = branchHead;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getNetwork() {
+ return network;
+ }
+
+ public void setNetwork(String network) {
+ this.network = network;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ public String getBranch() {
+ return branch;
+ }
+
+ public void setBranch(String branch) {
+ this.branch = branch;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ public Integer getMaxEmployees() {
+ return maxEmployees;
+ }
+
+ public void setMaxEmployees(Integer maxEmployees) {
+ this.maxEmployees = maxEmployees;
+ }
+
+ public String getManufacturer() {
+ return manufacturer;
+ }
+
+ public void setManufacturer(String manufacturer) {
+ this.manufacturer = manufacturer;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Departments.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/EquipmentType.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/EquipmentType.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/EquipmentType.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author John Griffin
+ */
+@SuppressWarnings("unchecked")
+public class EquipmentType implements FieldBridge, ParameterizedBridge {
+ private Map equips;
+
+ public void setParameterValues(Map parameters) {
+ // This map was defined by the parameters of the ClassBridge annotation.
+ this.equips = parameters;
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Departments deps = ( Departments ) value;
+ Field field;
+ String fieldValue1 = deps.getManufacturer();
+
+ if ( fieldValue1 != null ) {
+ String fieldValue = ( String ) equips.get( fieldValue1 );
+ field = new Field(
+ name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector()
+ );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/EquipmentType.java
___________________________________________________________________
Name: svn:keywords
+ Id
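
The class bridges used by Departments combine several properties into single index fields ("branchnetwork", "equiptype"). A minimal sketch of how such a derived field can be queried through the FullTextSession API; the helper class and the lowercase term value are illustrative assumptions (the default analyzer lowercases tokens), not part of this commit:

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;

import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class ClassBridgeQuerySketch {
    // Returns all Departments whose class-bridge field "equiptype" contains the given token.
    // With the default analyzer the indexed tokens are lowercased, so pass e.g. "cisco".
    public static List findByEquipmentType(Session session, String token) {
        FullTextSession fullTextSession = Search.getFullTextSession( session );
        return fullTextSession
                .createFullTextQuery( new TermQuery( new Term( "equiptype", token ) ), Departments.class )
                .list();
    }
}
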
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Gangster.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Gangster.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Gangster.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,60 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Gangster {
+ @Id @DocumentId
+ private Serializable id;
+ private String name;
+
+ public Serializable getId() {
+ return id;
+ }
+
+ public void setId(Serializable id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Gangster.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/PaddedIntegerBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/PaddedIntegerBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,62 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.hibernate.search.bridge.ParameterizedBridge;
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * Padding integer bridge.
+ * All numbers will be padded with leading zeros to match the configured width (5 digits by default).
+ *
+ * @author Emmanuel Bernard
+ */
+public class PaddedIntegerBridge implements TwoWayStringBridge, ParameterizedBridge {
+
+ public static final String PADDING_PROPERTY = "padding";
+
+ private int padding = 5; //default
+
+ public void setParameterValues(Map parameters) {
+ Object padding = parameters.get( PADDING_PROPERTY );
+ if (padding != null) this.padding = (Integer) padding;
+ }
+
+ public String objectToString(Object object) {
+ String rawInteger = object.toString();
+ if (rawInteger.length() > padding) throw new IllegalArgumentException( "Number has more digits than the configured padding of " + padding );
+ StringBuilder paddedInteger = new StringBuilder( );
+ for ( int padIndex = rawInteger.length() ; padIndex < padding ; padIndex++ ) {
+ paddedInteger.append('0');
+ }
+ return paddedInteger.append( rawInteger ).toString();
+ }
+
+ public Object stringToObject(String stringValue) {
+ return new Integer(stringValue);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/PaddedIntegerBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
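
PaddedIntegerBridge is a two-way bridge meant to be attached to a numeric property through @FieldBridge, so that the indexed form is fixed-width and lexicographic order matches numeric order. A minimal sketch with a hypothetical Invoice entity (not part of this commit), relying on the default padding of 5:

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;

@Entity
@Indexed
public class Invoice {
    @Id @GeneratedValue @DocumentId
    private Long id;

    // Indexed as e.g. "00042", so term and range queries see a fixed-width value.
    @Field(index = Index.UN_TOKENIZED)
    @FieldBridge(impl = PaddedIntegerBridge.class)
    private Integer amount;
}
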
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Student.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Student.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Student.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,67 @@
+package org.hibernate.search.test.bridge;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+import javax.persistence.*;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@Table(name = "student")
+public class Student {
+
+
+ private Long id;
+ private String name;
+ private String grade;
+ private Teacher teacher;
+
+
+ @Id
+ @GeneratedValue
+ @Column(name = "student_id")
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Column(name = "name")
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ @Column(name = "grade")
+ @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ public String getGrade() {
+ return grade;
+ }
+
+ public void setGrade(String grade) {
+ this.grade = grade;
+ }
+
+ @ManyToOne
+ @JoinColumn(name = "teacher_id")
+ public Teacher getTeacher() {
+ return teacher;
+ }
+
+ public void setTeacher(Teacher teacher) {
+ this.teacher = teacher;
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Student.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/StudentsSizeBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/StudentsSizeBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/StudentsSizeBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,30 @@
+package org.hibernate.search.test.bridge;
+
+import org.hibernate.search.bridge.TwoWayStringBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class StudentsSizeBridge implements TwoWayStringBridge {
+
+ public Object stringToObject(String stringValue) {
+ if (null == stringValue || stringValue.equals("")) {
+ return 0;
+ }
+ return Integer.parseInt(stringValue);
+ }
+
+ public String objectToString(Object object) {
+ if (object instanceof Teacher) {
+ Teacher teacher = (Teacher) object;
+ if (teacher.getStudents() != null && teacher.getStudents().size() > 0)
+ return String.valueOf(teacher.getStudents().size());
+ else
+ return null;
+ } else {
+ throw new IllegalArgumentException(StudentsSizeBridge.class +
+ " used on a non-Teacher type: " + object.getClass());
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/StudentsSizeBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Teacher.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Teacher.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Teacher.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+package org.hibernate.search.test.bridge;
+
+import org.hibernate.search.annotations.*;
+
+import javax.persistence.*;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@Table(name = "teacher")
+@ClassBridge(
+ name = "amount_of_students",
+ index = Index.UN_TOKENIZED,
+ store = Store.YES,
+ impl = StudentsSizeBridge.class
+)
+public class Teacher {
+
+ private Long id;
+ private String name;
+ private List<Student> students;
+
+
+ public Teacher() {
+ students = new ArrayList<Student>();
+ }
+
+ @Id
+ @GeneratedValue
+ @Column(name = "teacher_id")
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Column(name = "name")
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ @OneToMany(mappedBy = "teacher")
+ public List<Student> getStudents() {
+ return students;
+ }
+
+ public void setStudents(List<Student> students) {
+ this.students = students;
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/Teacher.java
___________________________________________________________________
Name: svn:keywords
+ Id
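
The class bridge declared on Teacher writes the student count into the un-tokenized, stored field "amount_of_students", so it can be matched with an exact term (no analysis is applied to UN_TOKENIZED fields). A minimal sketch; the helper class and the open Session are illustrative assumptions, not part of this commit:

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;

import org.hibernate.Session;
import org.hibernate.search.Search;

public class TeacherQuerySketch {
    // Finds teachers with exactly the given number of students, matching the
    // value written by StudentsSizeBridge (String.valueOf of the list size).
    public static List findByStudentCount(Session session, int count) {
        return Search.getFullTextSession( session )
                .createFullTextQuery( new TermQuery( new Term( "amount_of_students", String.valueOf( count ) ) ), Teacher.class )
                .list();
    }
}
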
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateFieldBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateFieldBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateFieldBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.util.StringHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TruncateFieldBridge implements FieldBridge {
+ public Object get(String name, Document document) {
+ Field field = document.getField( name );
+ return field.stringValue();
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ String indexedString = (String) value;
+ //Do not add fields on empty strings, seems a sensible default in most situations
+ if ( StringHelper.isNotEmpty( indexedString ) ) {
+ Field field = new Field(name, indexedString.substring(0,
+ indexedString.length() / 2), luceneOptions.getStore(),
+ luceneOptions.getIndex(), luceneOptions.getTermVector());
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateFieldBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateStringBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateStringBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateStringBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,49 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import java.util.Map;
+
+import org.hibernate.search.bridge.StringBridge;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class TruncateStringBridge implements StringBridge, ParameterizedBridge {
+ private int div;
+ public Object stringToObject(String stringValue) {
+ return stringValue;
+ }
+
+ public String objectToString(Object object) {
+ String string = (String) object;
+ return object != null ? string.substring( 0, string.length() / div ) : null;
+ }
+
+ public void setParameterValues(Map parameters) {
+ div = Integer.valueOf( (String) parameters.get( "dividedBy" ) );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/TruncateStringBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
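
Both truncate bridges above are attached to a String property with @FieldBridge; the ParameterizedBridge variant receives its divisor through @Parameter (annotation parameter values arrive as strings, hence the Integer.valueOf in setParameterValues). A minimal sketch with a hypothetical Article entity, not part of this commit:

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FieldBridge;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Parameter;
import org.hibernate.search.annotations.Store;

@Entity
@Indexed
public class Article {
    @Id @GeneratedValue @DocumentId
    private Long id;

    // Only the first half of the title reaches the index ("dividedBy" = 2).
    @Field(index = Index.TOKENIZED, store = Store.YES)
    @FieldBridge(impl = TruncateStringBridge.class,
                 params = @Parameter(name = "dividedBy", value = "2"))
    private String title;
}
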
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.bridge;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.cfg.AnnotationConfiguration;
+import junit.framework.TestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class UnresolvedBridgeTest extends TestCase {
+ public void testSerializableType() throws Exception {
+ AnnotationConfiguration cfg = new AnnotationConfiguration();
+
+ for (int i = 0; i < getMappings().length; i++) {
+ cfg.addAnnotatedClass( getMappings()[i] );
+ }
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ try {
+ cfg.buildSessionFactory();
+ fail("Undefined bridge went through");
+ }
+ catch( Exception e ) {
+ Throwable ee = e;
+ boolean hasSearchException = false;
+ for (;;) {
+ if (ee == null) {
+ break;
+ }
+ else if (ee instanceof SearchException) {
+ hasSearchException = true;
+ break;
+ }
+ ee = ee.getCause();
+ }
+ assertTrue( hasSearchException );
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Gangster.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/bridge/UnresolvedBridgeTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/Animal.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/Animal.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/Animal.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.classloading;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+
+/**
+ * Test class which is configured via XML and does not depend on Hibernate Annotations.
+ *
+ * @author Hardy Ferentschik
+ */
+@Indexed(index = "Animal")
+public class Animal {
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/Animal.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/NoAnnotationsTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/NoAnnotationsTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/NoAnnotationsTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.classloading;
+
+import java.util.List;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.Search;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class NoAnnotationsTest extends org.hibernate.search.test.TestCase {
+
+ /**
+ * Tests that @DocumentId is optional. See HSEARCH-104.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testConfigurationWithoutAnnotations() throws Exception {
+ Animal dog = new Animal();
+ dog.setName( "Dog" );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.save( dog );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ List results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "name", "dog" ) )
+ ).list();
+ assertEquals( 1, results.size() );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFlushListenerRegistrationWithoutAnnotations() throws Exception {
+ // This test should pass even if the flushListener is not registered,
+ // as a workaround is done in code (you'll see a warning in logs).
+ Animal penguin = new Animal();
+ penguin.setName( "Penguin" );
+
+ Session s = openSession();
+ s.save( penguin );
+ s.flush();
+ s.clear();
+
+ Transaction tx = s.beginTransaction();
+ List results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "name", "penguin" ) )
+ ).list();
+ assertEquals( 1, results.size() );
+ tx.commit();
+ s.close();
+ }
+
+ protected String[] getXmlFiles() {
+ return new String[] {
+ "org/hibernate/search/test/classloading/Animal.hbm.xml"
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/classloading/NoAnnotationsTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Address.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Address.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Address.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,103 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Calendar;
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class Address {
+ @Id
+ @GeneratedValue
+ private Long addressId;
+ private String street1;
+ private String street2;
+ @ManyToOne
+ private Country country;
+ private Date dateCreated;
+ private Calendar lastUpdated;
+ private String owner;
+
+ public Long getAddressId() {
+ return addressId;
+ }
+
+ public String getStreet1() {
+ return street1;
+ }
+
+ public void setStreet1(String street1) {
+ this.street1 = street1;
+ }
+
+ public Country getCountry() {
+ return country;
+ }
+
+ public void setCountry(Country country) {
+ this.country = country;
+ }
+
+ public String getStreet2() {
+ return street2;
+ }
+
+ public void setStreet2(String street2) {
+ this.street2 = street2;
+ }
+
+ public Calendar getLastUpdated() {
+ return lastUpdated;
+ }
+
+ public void setLastUpdated(Calendar lastUpdated) {
+ this.lastUpdated = lastUpdated;
+ }
+
+ public Date getDateCreated() {
+ return dateCreated;
+ }
+
+ public void setDateCreated(Date dateCreated) {
+ this.dateCreated = dateCreated;
+ }
+
+ public void setOwner(String owner) {
+ this.owner = owner;
+ }
+
+ public String getOwner() {
+ return this.owner;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Address.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/BlogEntry.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/BlogEntry.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/BlogEntry.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,127 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Date;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.apache.solr.analysis.EnglishPorterFilterFactory;
+import org.apache.solr.analysis.GermanStemFilterFactory;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+import org.hibernate.search.analyzer.Discriminator;
+import org.hibernate.search.annotations.AnalyzerDef;
+import org.hibernate.search.annotations.AnalyzerDefs;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.TokenFilterDef;
+import org.hibernate.search.annotations.TokenizerDef;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@AnalyzerDefs({
+ @AnalyzerDef(name = "en",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = EnglishPorterFilterFactory.class
+ )
+ }),
+ @AnalyzerDef(name = "de",
+ tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
+ filters = {
+ @TokenFilterDef(factory = LowerCaseFilterFactory.class),
+ @TokenFilterDef(factory = GermanStemFilterFactory.class)
+ })
+})
+public class BlogEntry {
+ private Long id;
+ private String language;
+ private String title;
+ private String description;
+ private Date dateCreated;
+
+ @Id @GeneratedValue
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getLanguage() {
+ return language;
+ }
+
+ public void setLanguage(String language) {
+ this.language = language;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public Date getDateCreated() {
+ return dateCreated;
+ }
+
+ public void setDateCreated(Date dateCreated) {
+ this.dateCreated = dateCreated;
+ }
+
+ public static class BlogLangDiscriminator implements Discriminator {
+
+ public String getAnalyzerDefinitionName(Object value, Object entity, String field) {
+ if ( value == null ) return null;
+ if ( !( value instanceof String ) )
+ throw new IllegalArgumentException( "expected string as value in language discriminator" );
+ if ( "description".equals( field ) ) {
+ return (String) value;
+ }
+ else {
+ //"title" is not affected
+ return null;
+ }
+
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/BlogEntry.java
___________________________________________________________________
Name: svn:keywords
+ Id
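
BlogLangDiscriminator picks the analyzer definition per document from the value of the language property, and only for the description field (title keeps the default analyzer). A sketch of how such a discriminator is typically wired with @AnalyzerDiscriminator from Hibernate Search 3.1; the LocalizedEntry entity is a hypothetical illustration, not part of this commit:

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.search.annotations.AnalyzerDiscriminator;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;

@Entity
@Indexed
public class LocalizedEntry {
    @Id @GeneratedValue @DocumentId
    private Long id;

    // The value ("en" or "de") selects the matching @AnalyzerDef for this document.
    @Field(index = Index.UN_TOKENIZED)
    @AnalyzerDiscriminator(impl = BlogEntry.BlogLangDiscriminator.class)
    private String language;

    // Analyzed with the definition chosen by the discriminator, as decided in getAnalyzerDefinitionName above.
    @Field(index = Index.TOKENIZED)
    private String description;
}
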
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CatDeptsFieldsClassBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CatDeptsFieldsClassBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CatDeptsFieldsClassBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author John Griffin
+ */
+public class CatDeptsFieldsClassBridge implements FieldBridge, ParameterizedBridge {
+
+ private String sepChar;
+
+ @SuppressWarnings("unchecked")
+ public void setParameterValues(Map parameters) {
+ this.sepChar = (String) parameters.get( "sepChar" );
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Departments dep = (Departments) value;
+ String fieldValue1 = dep.getBranch();
+ if ( fieldValue1 == null ) {
+ fieldValue1 = "";
+ }
+ String fieldValue2 = dep.getNetwork();
+ if ( fieldValue2 == null ) {
+ fieldValue2 = "";
+ }
+ String fieldValue = fieldValue1 + sepChar + fieldValue2;
+ Field field = new Field( name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector() );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CatDeptsFieldsClassBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationParseHelperTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationParseHelperTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationParseHelperTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Properties;
+
+import junit.framework.TestCase;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.configuration.ConfigurationParseHelper;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class ConfigurationParseHelperTest extends TestCase {
+
+ public void testIntegerParsers() {
+ assertEquals( 0, ConfigurationParseHelper.parseInt( " 0 ", "not important") );
+ assertEquals( 8, ConfigurationParseHelper.parseInt( null, 8, null ) );
+ assertEquals( 56, ConfigurationParseHelper.parseInt( "56", 8, null ) );
+ Properties props = new Properties();
+ props.setProperty( "value1", "58" );
+ assertEquals( 58, ConfigurationParseHelper.getIntValue( props, "value1", 8 ) );
+ assertEquals( 8, ConfigurationParseHelper.getIntValue( props, "value2", 8 ) );
+ props.setProperty( "value2", "nand" );
+ boolean exceptionLaunched = false;
+ try {
+ ConfigurationParseHelper.getIntValue( props, "value2", 8 );
+ } catch (SearchException e) {
+ exceptionLaunched = true;
+ }
+ assertTrue( exceptionLaunched );
+ }
+
+ public void testBooleanParsers() {
+ assertTrue( ConfigurationParseHelper.parseBoolean( "true", null ) );
+ assertTrue( ConfigurationParseHelper.parseBoolean( " True ", null ) );
+ assertFalse( ConfigurationParseHelper.parseBoolean( "false", null ) );
+ assertFalse( ConfigurationParseHelper.parseBoolean( " False ", null ) );
+ boolean exceptionLaunched = false;
+ try {
+ ConfigurationParseHelper.parseBoolean( "5", "error" );
+ } catch (SearchException e) {
+ exceptionLaunched = true;
+ }
+ assertTrue( exceptionLaunched );
+ exceptionLaunched = false;
+ try {
+ ConfigurationParseHelper.parseBoolean( null, "error" );
+ } catch (SearchException e) {
+ exceptionLaunched = true;
+ }
+ assertTrue( exceptionLaunched );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationParseHelperTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,133 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.configuration.IndexWriterSetting;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * Contains some utility methods to simplify coding of
+ * testcases about configuration parsing.
+ *
+ * @author Sanne Grinovero
+ */
+public abstract class ConfigurationReadTestCase extends SearchTestCase {
+
+ private SearchFactoryImplementor searchFactory;
+
+ protected enum TransactionType {
+ TRANSACTION, BATCH
+ }
+
+ public ConfigurationReadTestCase() {
+
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
+ searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
+ fullTextSession.close();
+ FileHelper.delete( getBaseIndexDir() );
+ getBaseIndexDir().mkdirs();
+ }
+
+ protected final void assertValueIsDefault(Class testEntity, TransactionType parmGroup, IndexWriterSetting setting) {
+ assertValueIsDefault( testEntity, 0, parmGroup, setting );
+ }
+
+ protected final void assertValueIsDefault(Class testEntity, int shard, TransactionType parmGroup, IndexWriterSetting setting) {
+ boolean batch = isBatch( parmGroup );
+ assertNull( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey() + " : value was expected unset!",
+ getParameter( shard, batch, setting, testEntity ) );
+ }
+
+ protected final void assertValueIsSet(Class testEntity, TransactionType parmGroup, IndexWriterSetting setting, int expectedValue) {
+ assertValueIsSet( testEntity, 0, parmGroup, setting, expectedValue );
+ }
+
+ protected final void assertValueIsSet(Class testEntity, int shard, TransactionType parmGroup, IndexWriterSetting setting, int expectedValue) {
+ boolean batch = isBatch( parmGroup );
+ assertNotNull( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey(),
+ getParameter( shard, batch, setting, testEntity ) );
+ assertEquals( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey(), expectedValue,
+ (int) getParameter( shard, batch, setting, testEntity ) );
+ }
+
+ protected final SearchFactoryImplementor getSearchFactory() {
+ return searchFactory;
+ }
+
+ private boolean isBatch(TransactionType parmGroup) {
+ return parmGroup == TransactionType.BATCH;
+ }
+
+ private Integer getParameter(int shard, boolean batch, IndexWriterSetting setting, Class testEntity) {
+ if ( batch ) {
+ return searchFactory.getIndexingParameters( searchFactory.getDirectoryProviders( testEntity )[shard] )
+ .getBatchIndexParameters().getCurrentValueFor( setting );
+ }
+ else {
+ return searchFactory.getIndexingParameters( searchFactory.getDirectoryProviders( testEntity )[shard] )
+ .getTransactionIndexParameters().getCurrentValueFor( setting );
+ }
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.indexBase", getBaseIndexDir().getAbsolutePath() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ FileHelper.delete( getBaseIndexDir() );
+ }
+
+ public static void assertCfgIsInvalid(AnnotationConfiguration configuration, Class[] mapping) {
+ try {
+ for ( Class annotated : mapping ) {
+ ( configuration ).addAnnotatedClass( annotated );
+ }
+ configuration.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ configuration.buildSessionFactory();
+ fail();
+ } catch (HibernateException e) {
+ //thrown exceptions means the test is ok when caused by a SearchException
+ Throwable cause = e.getCause();
+ assertTrue( cause instanceof SearchException );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Country.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Country.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Country.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class Country {
+ @Id
+ @GeneratedValue
+ private Long id;
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ private String name;
+
+ @OneToMany(mappedBy = "country")
+ private Set<Address> addresses = new HashSet<Address>();
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public void addAddress(Address address) {
+ addresses.add( address );
+ }
+
+ public Set<Address> getAddresses() {
+ return this.addresses;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Country.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBackendTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBackendTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBackendTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import junit.framework.TestCase;
+
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.blackhole.BlackHoleBackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.lucene.LuceneBackendQueueProcessorFactory;
+import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class CustomBackendTest extends TestCase {
+
+ public void test() {
+ verifyBackendUsage( "blackhole", BlackHoleBackendQueueProcessorFactory.class );
+ verifyBackendUsage( "lucene", LuceneBackendQueueProcessorFactory.class );
+ verifyBackendUsage( BlackHoleBackendQueueProcessorFactory.class );
+ verifyBackendUsage( LuceneBackendQueueProcessorFactory.class );
+ }
+
+ private void verifyBackendUsage(String name, Class<? extends BackendQueueProcessorFactory> backendType) {
+ FullTextSessionBuilder builder = new FullTextSessionBuilder();
+ FullTextSession ftSession = builder
+ .setProperty( "hibernate.search.worker.backend", name )
+ .openFullTextSession();
+ SearchFactoryImpl searchFactory = (SearchFactoryImpl) ftSession.getSearchFactory();
+ ftSession.close();
+ assertEquals( backendType, searchFactory.getBackendQueueProcessorFactory().getClass() );
+ builder.close();
+ }
+
+ public void verifyBackendUsage(Class<? extends BackendQueueProcessorFactory> backendType) {
+ verifyBackendUsage( backendType.getName(), backendType );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBackendTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,42 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * Example for a custom <code>BoostStrategy</code> implementation.
+ *
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.engine.BoostStrategy
+ */
+public class CustomBoostStrategy implements BoostStrategy {
+
+ public float defineBoost(Object value) {
+ DynamicBoostedDescLibrary indexed = ( DynamicBoostedDescLibrary ) value;
+ return indexed.getDynScore();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomBoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomFieldBoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomFieldBoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomFieldBoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * Example for a custom <code>BoostStrategy</code> implementation.
+ *
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.engine.BoostStrategy
+ */
+public class CustomFieldBoostStrategy implements BoostStrategy {
+
+ public float defineBoost(Object value) {
+ String name = ( String ) value;
+ if ( "foobar".equals( name ) ) {
+ return 3.0f;
+ }
+ else {
+ return 1.0f;
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/CustomFieldBoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Departments.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Departments.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Departments.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+/**
+ * This is just a simple copy of the Department entity to allow
+ * separation of the tests for ClassBridge and ClassBridges.
+ *
+ * @author John Griffin
+ */
+@Entity
+public class Departments {
+ @Id
+ @GeneratedValue
+ private int deptsId;
+
+ private String network;
+ private String manufacturer;
+ private String branchHead;
+ private String branch;
+ private Integer maxEmployees;
+
+ public int getDeptsId() {
+ return deptsId;
+ }
+
+ public void setDeptsId(int deptsId) {
+ this.deptsId = deptsId;
+ }
+
+ public String getBranchHead() {
+ return branchHead;
+ }
+
+ public void setBranchHead(String branchHead) {
+ this.branchHead = branchHead;
+ }
+
+ public String getNetwork() {
+ return network;
+ }
+
+ public void setNetwork(String network) {
+ this.network = network;
+ }
+
+ public String getBranch() {
+ return branch;
+ }
+
+ public void setBranch(String branch) {
+ this.branch = branch;
+ }
+
+ public Integer getMaxEmployees() {
+ return maxEmployees;
+ }
+
+ public void setMaxEmployees(Integer maxEmployees) {
+ this.maxEmployees = maxEmployees;
+ }
+
+ public String getManufacturer() {
+ return manufacturer;
+ }
+
+ public void setManufacturer(String manufacturer) {
+ this.manufacturer = manufacturer;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Departments.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/DynamicBoostedDescLibrary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/DynamicBoostedDescLibrary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/DynamicBoostedDescLibrary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+/**
+ * Test entity using a custom <code>CustomBoostStrategy</code> to set
+ * the document boost from the dynScore field.
+ *
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class DynamicBoostedDescLibrary {
+
+ @Id
+ @GeneratedValue
+ private int libraryId;
+ private float dynScore;
+ private String name;
+
+ public DynamicBoostedDescLibrary() {
+ dynScore = 1.0f;
+ }
+
+
+ public int getLibraryId() {
+ return libraryId;
+ }
+
+ public void setLibraryId(int id) {
+ this.libraryId = id;
+ }
+
+ public float getDynScore() {
+ return dynScore;
+ }
+
+ public void setDynScore(float dynScore) {
+ this.dynScore = dynScore;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/DynamicBoostedDescLibrary.java
___________________________________________________________________
Name: svn:keywords
+ Id
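Note: CustomBoostStrategy and CustomFieldBoostStrategy above are attached to DynamicBoostedDescLibrary by the mapping under test, which this package builds programmatically. For reference only, a hedged sketch of the annotation-based equivalent (not part of this commit) would use @DynamicBoost at class and field level:

    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import org.hibernate.search.annotations.DynamicBoost;
    import org.hibernate.search.annotations.Field;
    import org.hibernate.search.annotations.Indexed;

    @Entity
    @Indexed
    @DynamicBoost(impl = CustomBoostStrategy.class)           // document boost read from dynScore
    public class DynamicBoostedDescLibrary {
        @Id @GeneratedValue
        private int libraryId;

        private float dynScore = 1.0f;

        @Field
        @DynamicBoost(impl = CustomFieldBoostStrategy.class)  // e.g. boosts the value "foobar" to 3.0f
        private String name;

        // getters and setters as in the entity above
    }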
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EquipmentType.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EquipmentType.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EquipmentType.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+import org.hibernate.search.bridge.FieldBridge;
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.ParameterizedBridge;
+
+/**
+ * @author John Griffin
+ */
+@SuppressWarnings("unchecked")
+public class EquipmentType implements FieldBridge, ParameterizedBridge {
+ private Map equips;
+
+ public void setParameterValues(Map parameters) {
+ // This map was defined by the parameters of the ClassBridge annotation.
+ this.equips = parameters;
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ // In this particular class the name of the new field was passed
+ // from the name field of the ClassBridge Annotation. This is not
+ // a requirement. It just works that way in this instance. The
+ // actual name could be supplied by hard coding it below.
+ Departments deps = ( Departments ) value;
+ Field field;
+ String fieldValue1 = deps.getManufacturer();
+
+ if ( fieldValue1 != null ) {
+ String fieldValue = ( String ) equips.get( fieldValue1 );
+ field = new Field(
+ name, fieldValue, luceneOptions.getStore(), luceneOptions.getIndex(), luceneOptions.getTermVector()
+ );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EquipmentType.java
___________________________________________________________________
Name: svn:keywords
+ Id
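Note: EquipmentType above is a ParameterizedBridge, so the map it receives in setParameterValues is defined wherever the bridge is declared on the entity; in this package that declaration is part of the programmatic mapping. A rough annotation-based sketch (not part of this commit; the parameter names and values are illustrative) might look like:

    import javax.persistence.Entity;
    import org.hibernate.search.annotations.ClassBridge;
    import org.hibernate.search.annotations.Indexed;
    import org.hibernate.search.annotations.Parameter;

    @Entity
    @Indexed
    @ClassBridge(name = "equipment",
                 impl = EquipmentType.class,
                 params = { @Parameter(name = "C", value = "Cisco"),
                            @Parameter(name = "D", value = "D-Link") })
    public class Departments {
        // fields and accessors as in the Departments entity above
    }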
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EventListenerRegisterTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EventListenerRegisterTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EventListenerRegisterTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,199 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.Properties;
+
+import org.hibernate.event.EventListeners;
+import org.hibernate.event.PostCollectionRecreateEvent;
+import org.hibernate.event.PostCollectionRecreateEventListener;
+import org.hibernate.event.PostCollectionRemoveEvent;
+import org.hibernate.event.PostCollectionRemoveEventListener;
+import org.hibernate.event.PostCollectionUpdateEvent;
+import org.hibernate.event.PostCollectionUpdateEventListener;
+import org.hibernate.event.PostDeleteEvent;
+import org.hibernate.event.PostDeleteEventListener;
+import org.hibernate.event.PostInsertEvent;
+import org.hibernate.event.PostInsertEventListener;
+import org.hibernate.event.PostUpdateEvent;
+import org.hibernate.event.PostUpdateEventListener;
+import org.hibernate.search.Environment;
+import org.hibernate.search.event.EventListenerRegister;
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.event.FullTextIndexCollectionEventListener;
+
+import junit.framework.TestCase;
+
+/**
+ * @author Sanne Grinovero
+ */
+@SuppressWarnings("deprecation")
+public class EventListenerRegisterTest extends TestCase {
+
+ public void testRegisterOnEmptyListeners_CfgDisabled() {
+ EventListeners evListeners = new EventListeners();
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( false ) );
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( false ) );
+ assertPresence( false, evListeners );
+ }
+
+ public void testRegisterOnEmptyListeners_CfgEnabled() {
+ EventListeners evListeners = new EventListeners();
+ //tests registering multiple times is idempotent:
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( true ) );
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( true ) );
+ assertPresence( true, evListeners );
+ }
+
+ public void testRegisterOnEmptyListeners_CfgAuto() {
+ EventListeners evListeners = new EventListeners();
+ EventListenerRegister.enableHibernateSearch( evListeners, new Properties() );
+ EventListenerRegister.enableHibernateSearch( evListeners, new Properties() );
+ assertPresence( true, evListeners );
+ }
+
+ public void testOnAlreadyRegistered() {
+ helperOnAlreadyRegistered( new FullTextIndexEventListener() );
+ }
+
+ public void testOnAlreadyRegisteredDeprecated() {
+ helperOnAlreadyRegistered( new FullTextIndexCollectionEventListener() );
+ }
+
+ public void testOnPopulatedEventListeners() {
+ EventListeners evListeners = makeSomeEventListeners();
+ EventListenerRegister.enableHibernateSearch( evListeners, new Properties() );
+ EventListenerRegister.enableHibernateSearch( evListeners, new Properties() );
+ assertPresence( true, evListeners );
+ }
+
+ private void helperOnAlreadyRegistered(FullTextIndexEventListener listenerFullText) {
+
+ AnotherListener listenerA = new AnotherListener();
+ AnotherListener listenerB = new AnotherListener();
+
+ EventListeners evListeners = new EventListeners();
+ evListeners.setPostInsertEventListeners(
+ new PostInsertEventListener[]{ listenerA, listenerB, listenerFullText } );
+ evListeners.setPostUpdateEventListeners(
+ new PostUpdateEventListener[]{ listenerA, listenerB, listenerFullText } );
+ evListeners.setPostDeleteEventListeners(
+ new PostDeleteEventListener[]{ listenerA, listenerB, listenerFullText } );
+ evListeners.setPostCollectionRecreateEventListeners(
+ new PostCollectionRecreateEventListener[]{ listenerA, listenerB, listenerFullText } );
+ evListeners.setPostCollectionRemoveEventListeners(
+ new PostCollectionRemoveEventListener[]{ listenerA, listenerB, listenerFullText } );
+ evListeners.setPostCollectionUpdateEventListeners(
+ new PostCollectionUpdateEventListener[]{ listenerA, listenerB, listenerFullText } );
+
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( false ) );
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( false ) );
+ EventListenerRegister.enableHibernateSearch( evListeners, makeConfiguration( false ) );
+ assertPresence( true, evListeners );
+ }
+
+ private EventListeners makeSomeEventListeners() {
+
+ AnotherListener listenerA = new AnotherListener();
+ AnotherListener listenerB = new AnotherListener();
+ AnotherListener listenerC = new AnotherListener();
+
+ EventListeners evListeners = new EventListeners();
+ evListeners.setPostInsertEventListeners(
+ new PostInsertEventListener[]{ listenerA, listenerB, listenerC } );
+ evListeners.setPostUpdateEventListeners(
+ new PostUpdateEventListener[]{ listenerA, listenerB, listenerC } );
+ evListeners.setPostDeleteEventListeners(
+ new PostDeleteEventListener[]{ listenerA, listenerB, listenerC } );
+ evListeners.setPostCollectionRecreateEventListeners(
+ new PostCollectionRecreateEventListener[]{ listenerA, listenerB, listenerC } );
+ evListeners.setPostCollectionRemoveEventListeners(
+ new PostCollectionRemoveEventListener[]{ listenerA, listenerB, listenerC } );
+ evListeners.setPostCollectionUpdateEventListeners(
+ new PostCollectionUpdateEventListener[]{ listenerA, listenerB, listenerC } );
+
+ return evListeners;
+ }
+
+ private void assertPresence(boolean expected, EventListeners evListeners) {
+ assertEquals( expected, isPresent( evListeners.getPostInsertEventListeners() ) );
+ assertEquals( expected, isPresent( evListeners.getPostUpdateEventListeners() ) );
+ assertEquals( expected, isPresent( evListeners.getPostDeleteEventListeners() ) );
+ assertEquals( expected, isPresent( evListeners.getPostCollectionRecreateEventListeners() ) );
+ assertEquals( expected, isPresent( evListeners.getPostCollectionRemoveEventListeners() ) );
+ assertEquals( expected, isPresent( evListeners.getPostCollectionUpdateEventListeners() ) );
+ }
+
+ private static Properties makeConfiguration(boolean enableSearch) {
+ Properties p = new Properties();
+ p.setProperty( Environment.AUTOREGISTER_LISTENERS, String.valueOf( enableSearch ) );
+ return p;
+ }
+
+ private static boolean isPresent(Object[] listeners) {
+ if (listeners==null)
+ return false;
+ boolean found = false; // to verify class present at most once.
+ for (Object eventListener : listeners) {
+ if ( FullTextIndexEventListener.class == eventListener.getClass() ) {
+ assertFalse( found );
+ found = true;
+ }
+ if ( FullTextIndexCollectionEventListener.class == eventListener.getClass() ) {
+ assertFalse( found );
+ found = true;
+ }
+ }
+ return found;
+ }
+
+ private static class AnotherListener implements PostDeleteEventListener,
+ PostInsertEventListener, PostUpdateEventListener,
+ PostCollectionRecreateEventListener, PostCollectionRemoveEventListener,
+ PostCollectionUpdateEventListener {
+
+ //empty methods: we just need some implementation of these listener interfaces.
+
+ public void onPostDelete(PostDeleteEvent event) {
+ }
+
+ public void onPostInsert(PostInsertEvent event) {
+ }
+
+ public void onPostUpdate(PostUpdateEvent event) {
+ }
+
+ public void onPostRecreateCollection(PostCollectionRecreateEvent event) {
+ }
+
+ public void onPostRemoveCollection(PostCollectionRemoveEvent event) {
+ }
+
+ public void onPostUpdateCollection(PostCollectionUpdateEvent event) {
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/EventListenerRegisterTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
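Note: EventListenerRegisterTest above checks that enableHibernateSearch is idempotent and honours Environment.AUTOREGISTER_LISTENERS, i.e. the hibernate.search.autoregister_listeners property. A minimal sketch of the switch it covers (assumed setup, not part of this commit):

    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    // the Search event listeners are registered automatically unless explicitly disabled:
    cfg.setProperty( "hibernate.search.autoregister_listeners", "false" );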
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Item.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Item.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Item.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+
+@Entity
+public class Item {
+
+ @Id @GeneratedValue
+ private Integer id;
+ private String description;
+
+
+ @ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.REMOVE } )
+ private ProductCatalog productCatalog;
+
+ public ProductCatalog getProductCatalog() {
+ return productCatalog;
+ }
+
+ public void setProductCatalog(ProductCatalog productCatalog) {
+ this.productCatalog = productCatalog;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/Item.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,142 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.hibernate.search.backend.LuceneIndexingParameters;
+import org.hibernate.search.test.Document;
+import org.hibernate.search.test.SerializationTestHelper;
+import org.hibernate.search.test.query.Author;
+import org.hibernate.search.test.query.Book;
+
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_BUFFERED_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_MERGE_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MERGE_FACTOR;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.RAM_BUFFER_SIZE;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.USE_COMPOUND_FILE;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_FIELD_LENGTH;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.TRANSACTION;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.BATCH;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class LuceneIndexingParametersTest extends ConfigurationReadTestCase {
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+
+ cfg.setProperty( "hibernate.search.default.batch.ram_buffer_size", "1" );
+ cfg.setProperty( "hibernate.search.default.transaction.use_compound_file", "false" );
+ cfg.setProperty( "hibernate.search.default.batch.use_compound_file", "true" ); //should see a warning about this
+//set by super : cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
+
+ cfg.setProperty( "hibernate.search.default.transaction.ram_buffer_size", "2" );
+ cfg.setProperty( "hibernate.search.default.transaction.max_merge_docs", "9" );
+//set by super : cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
+ cfg.setProperty( "hibernate.search.default.transaction.max_buffered_docs", "11" );
+
+ cfg.setProperty( "hibernate.search.Book.batch.max_merge_docs", "12" );
+ cfg.setProperty( "hibernate.search.Book.transaction.use_compound_file", "false" );
+ cfg.setProperty( "hibernate.search.Book.batch.merge_factor", "13" );
+ // new keyword "indexwriter" is also supported to group parameters:
+ cfg.setProperty( "hibernate.search.Book.indexwriter.batch.max_buffered_docs", "14" );
+
+ cfg.setProperty( "hibernate.search.Book.indexwriter.transaction.ram_buffer_size", "4" );
+ cfg.setProperty( "hibernate.search.Book.transaction.max_merge_docs", "15" );
+ cfg.setProperty( "hibernate.search.Book.transaction.merge_factor", "16" );
+ cfg.setProperty( "hibernate.search.Book.transaction.max_buffered_docs", "17" );
+
+ cfg.setProperty( "hibernate.search.Documents.transaction.ram_buffer_size", "default" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_merge_docs", "5" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.merge_factor", "6" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_buffered_docs", "7" );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_merge_docs", "9" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_field_length", "7" );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_field_length", "9" );
+ }
+
+ public void testDefaultIndexProviderParameters() {
+ assertValueIsSet( Author.class, BATCH, USE_COMPOUND_FILE, 1 );
+ assertValueIsSet( Author.class, TRANSACTION, RAM_BUFFER_SIZE, 2 );
+ assertValueIsSet( Author.class, TRANSACTION, MAX_MERGE_DOCS, 9 );
+ assertValueIsSet( Author.class, TRANSACTION, MAX_BUFFERED_DOCS, 11 );
+ assertValueIsSet( Author.class, TRANSACTION, MERGE_FACTOR, 100 );
+ }
+
+ public void testBatchParametersGlobals() {
+ assertValueIsSet( Author.class, BATCH, RAM_BUFFER_SIZE, 1 );
+ assertValueIsDefault( Author.class, BATCH, MAX_MERGE_DOCS );
+ assertValueIsSet( Author.class, BATCH, MAX_BUFFERED_DOCS, 1000 );
+ }
+
+ public void testMaxFieldLength() {
+ // a warning should also be logged about these:
+ assertValueIsSet( Document.class, TRANSACTION, MAX_FIELD_LENGTH, 7 );
+ assertValueIsSet( Document.class, BATCH, MAX_FIELD_LENGTH, 9 );
+ }
+
+ public void testExplicitBatchParameters() {
+ assertValueIsSet( Book.class, BATCH, MAX_MERGE_DOCS, 12 );
+ assertValueIsSet( Book.class, BATCH, MAX_BUFFERED_DOCS, 14 );
+ assertValueIsSet( Book.class, BATCH, MERGE_FACTOR, 13 );
+ assertValueIsSet( Book.class, TRANSACTION, USE_COMPOUND_FILE, 0 );
+ }
+
+ public void testInheritedBatchParameters() {
+ assertValueIsSet( Book.class, BATCH, RAM_BUFFER_SIZE, 1 );
+ }
+
+ public void testTransactionParameters() {
+ assertValueIsSet( Book.class, TRANSACTION, RAM_BUFFER_SIZE, 4 );
+ assertValueIsSet( Book.class, TRANSACTION, MAX_MERGE_DOCS, 15 );
+ assertValueIsSet( Book.class, TRANSACTION, MAX_BUFFERED_DOCS, 17 );
+ assertValueIsSet( Book.class, TRANSACTION, MERGE_FACTOR, 16 );
+ }
+
+ public void testDefaultKeywordOverwritesInherited() {
+ assertValueIsDefault( Document.class, TRANSACTION, RAM_BUFFER_SIZE );
+ assertValueIsDefault( Document.class, TRANSACTION, RAM_BUFFER_SIZE );
+ }
+
+ public void testSerializability() throws IOException, ClassNotFoundException {
+ LuceneIndexingParameters param = new LuceneIndexingParameters( new Properties() );
+ LuceneIndexingParameters paramCopy = (LuceneIndexingParameters)
+ SerializationTestHelper.duplicateBySerialization( param );
+ assertEquals( param.getBatchIndexParameters(), paramCopy.getBatchIndexParameters() );
+ assertEquals( param.getTransactionIndexParameters(), paramCopy.getTransactionIndexParameters() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Author.class,
+ Document.class
+ };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/MaskedPropertiesTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/MaskedPropertiesTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/MaskedPropertiesTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,111 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.io.IOException;
+import java.util.Properties;
+import java.util.Enumeration;
+
+import org.hibernate.search.backend.configuration.MaskedProperty;
+import org.hibernate.search.test.SerializationTestHelper;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class MaskedPropertiesTest extends junit.framework.TestCase {
+
+ public void testConfigurationParsingPrecedence() {
+ Properties cfg = new Properties();
+ cfg.put( "hibernate.search.Animals.transaction.indexwriter.max_merge_docs", "1" );
+ cfg.put( "hibernate.search.Animals.2.transaction.indexwriter.max_merge_docs", "2" );
+ cfg.put( "hibernate.search.Animals.2.transaction.max_merge_docs", "3" );
+ cfg.put( "hibernate.search.Animals.transaction.max_merge_docs", "5" );
+ cfg.put( "hibernate.search.default.transaction.max_merge_docs", "6" );
+ cfg.put( "hibernate.search.default.transaction.indexwriter.max_field_length", "7" );
+ cfg.put( "hibernate.notsearch.tests.default", "7" );
+
+ //this is more a "concept demo" than a test:
+ Properties root = new MaskedProperty( cfg, "hibernate.search" );
+ //only keys starting as "hibernate.search.default" are exposed:
+ Properties common = new MaskedProperty( root, "default" );
+ //now as "hibernate.search.Animals" or "hibernate.search.default" if the first fails:
+ Properties dirProvider = new MaskedProperty( root, "Animals", common );
+ //this narrows visibility to "hibernate.search.<providername|default>.transaction":
+ Properties transaction = new MaskedProperty( dirProvider, "transaction" );
+ Properties shard2 = new MaskedProperty( dirProvider, "2", dirProvider );
+ Properties transactionInShard2 = new MaskedProperty( shard2, "transaction", transaction );
+ Properties newStyleTransaction = new MaskedProperty( transaction, "indexwriter", transaction );
+ Properties newStyleTransactionInShard2 = new MaskedProperty(
+ transactionInShard2, "indexwriter", transactionInShard2 );
+
+ assertEquals( "7" , newStyleTransaction.getProperty( "max_field_length" ) );
+ assertEquals( "7" , newStyleTransactionInShard2.getProperty( "max_field_length" ) );
+ assertEquals( "5" , transaction.getProperty( "max_merge_docs" ) );
+
+ Enumeration<?> propertyNames = newStyleTransaction.propertyNames();
+ int count = 0;
+ while ( propertyNames.hasMoreElements() ) {
+ count++;
+ System.out.println( propertyNames.nextElement() );
+ }
+ }
+
+ public void testSerializability() throws IOException, ClassNotFoundException {
+ Properties cfg = new Properties();
+ cfg.setProperty( "base.key", "value" );
+ MaskedProperty originalProps = new MaskedProperty( cfg, "base" );
+ MaskedProperty theCopy = (MaskedProperty)
+ SerializationTestHelper.duplicateBySerialization( originalProps );
+ //this is also testing the logger (transient) has been restored:
+ assertEquals( "value", theCopy.getProperty( "key" ) );
+ }
+
+ public void testListingKeys() {
+ Properties defaultProp = new Properties();
+ defaultProp.put( "some.inherited.prop", "to test standard Properties fallback behaviour" );
+ Properties rootProp = new Properties( defaultProp );
+ rootProp.put( "some.long.dotted.prop1", "hello!" );
+ rootProp.put( "hidden.long.dotted.prop2", "hello again" );
+ Properties fallbackProp = new Properties();
+ fallbackProp.put( "default.long.dotted.prop3", "hello!" );
+ Properties masked = new MaskedProperty( rootProp, "some", fallbackProp );
+
+ assertTrue( masked.keySet().contains( "long.dotted.prop1" ) );
+ assertTrue( masked.keySet().contains( "default.long.dotted.prop3" ) );
+ assertTrue( masked.keySet().contains( "inherited.prop" ) );
+ assertFalse( masked.keySet().contains( "hidden.long.dotted.prop2" ) );
+ assertFalse( masked.keySet().contains( "long.dotted.prop2" ) );
+
+ Properties maskedAgain = new MaskedProperty( masked, "long.dotted", masked ); //falling back to same instance for **
+ assertTrue( maskedAgain.keySet().contains( "prop1" ) );
+ assertTrue( maskedAgain.keySet().contains( "long.dotted.prop1" ) ); //**: prop 1 should be visible in both ways
+ assertTrue( maskedAgain.keySet().contains( "default.long.dotted.prop3" ) );
+
+ Properties maskingAll = new MaskedProperty( masked, "secured" );
+ assertTrue( maskingAll.keySet().isEmpty() );
+ assertTrue( maskingAll.isEmpty() );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/MaskedPropertiesTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProductCatalog.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProductCatalog.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProductCatalog.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.hibernate.annotations.Cascade;
+import org.hibernate.annotations.IndexColumn;
+
+@Entity
+public class ProductCatalog {
+
+ @Id @GeneratedValue
+ private Integer id;
+ private String name;
+
+ @OneToMany(fetch = FetchType.LAZY)
+ @IndexColumn(name = "list_position")
+ @Cascade(org.hibernate.annotations.CascadeType.ALL)
+ private List<Item> items = new ArrayList<Item>();
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+
+ public List<Item> getItems() {
+ return items;
+ }
+
+ public void addItem(Item item) {
+ this.items.add(item);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProductCatalog.java
___________________________________________________________________
Name: svn:keywords
+ Id
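Note: the ProgrammaticMappingTest added below maps Address, BlogEntry, Item and ProductCatalog through the SearchMapping fluent API rather than annotations; the mapping setup itself appears further down in the file. A minimal sketch of that configuration style, assuming the fluent API as described in the Hibernate Search documentation (property names illustrative):

    import java.lang.annotation.ElementType;
    import org.hibernate.search.Environment;
    import org.hibernate.search.cfg.SearchMapping;

    SearchMapping mapping = new SearchMapping();
    mapping.entity( Address.class ).indexed()
            .property( "addressId", ElementType.FIELD ).documentId()
            .property( "street1", ElementType.FIELD ).field();
    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    cfg.getProperties().put( Environment.MODEL_MAPPING, mapping );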
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticMappingTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticMappingTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticMappingTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,758 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.lang.annotation.ElementType;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.List;
+import java.util.TimeZone;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.NGramFilterFactory;
+import org.apache.solr.analysis.SnowballPorterFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.Search;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.cfg.SearchMapping;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.analyzer.inheritance.ISOLatin1Analyzer;
+import org.hibernate.search.test.id.providedId.ManualTransactionContext;
+import org.hibernate.search.util.LoggerFactory;
+import org.slf4j.Logger;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ProgrammaticMappingTest extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ public void testMapping() throws Exception{
+ Address address = new Address();
+ address.setStreet1( "3340 Peachtree Rd NE" );
+ address.setStreet2( "JBoss" );
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( address );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "" + address.getAddressId() );
+ System.out.println( luceneQuery.toString() );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ assertEquals( "documenId does not work properly", 1, query.getResultSize() );
+
+ luceneQuery = parser.parse( "street1:peachtree" );
+ query = s.createFullTextQuery( luceneQuery ).setProjection( "idx_street2", FullTextQuery.THIS );
+ assertEquals( "Not properly indexed", 1, query.getResultSize() );
+ Object[] firstResult = (Object[]) query.list().get( 0 );
+ assertEquals( "@Field.store not respected", "JBoss", firstResult[0] );
+
+ s.delete( firstResult[1] );
+ tx.commit();
+ s.close();
+ }
+
+ public void testAnalyzerDef() throws Exception{
+ Address address = new Address();
+ address.setStreet1( "3340 Peachtree Rd NE" );
+ address.setStreet2( "JBoss" );
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( address );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "street1_ngram:pea" );
+
+ final FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ assertEquals( "Analyzer inoperant", 1, query.getResultSize() );
+
+ s.delete( query.list().get( 0 ));
+ tx.commit();
+ s.close();
+ }
+
+ public void testBridgeMapping() throws Exception{
+ Address address = new Address();
+ address.setStreet1( "Peachtree Rd NE" );
+ address.setStreet2( "JBoss" );
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ s.persist( address );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "street1:peac" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ assertEquals( "PrefixQuery should not be on", 0, query.getResultSize() );
+
+ luceneQuery = parser.parse( "street1_abridged:peac" );
+ query = s.createFullTextQuery( luceneQuery );
+ assertEquals( "Bridge not used", 1, query.getResultSize() );
+
+ s.delete( query.list().get( 0 ) );
+ tx.commit();
+ s.close();
+ }
+
+ public void testBoost() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Address address = new Address();
+ address.setStreet1( "Peachtree Rd NE" );
+ address.setStreet2( "Peachtnot Rd NE" );
+ s.persist( address );
+
+ address = new Address();
+ address.setStreet1( "Peachtnot Rd NE" );
+ address.setStreet2( "Peachtree Rd NE" );
+ s.persist( address );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "street1:peachtree OR idx_street2:peachtree" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting two results", 2, query.getResultSize() );
+
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ assertTrue( "first result should be strictly higher", (Float) results.get( 0 )[1] > (Float) results.get( 1 )[1]*1.9f );
+ assertEquals( "Wrong result ordered", address.getStreet1(), ( (Address) results.get( 0 )[0] ).getStreet1() );
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testAnalyzerDiscriminator() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ BlogEntry deEntry = new BlogEntry();
+ deEntry.setTitle( "aufeinanderschl\u00FCgen" );
+ deEntry.setDescription( "aufeinanderschl\u00FCgen" );
+ deEntry.setLanguage( "de" );
+ s.persist( deEntry );
+
+ BlogEntry enEntry = new BlogEntry();
+ enEntry.setTitle( "acknowledgment" );
+ enEntry.setDescription( "acknowledgment" );
+ enEntry.setLanguage( "en" );
+ s.persist( enEntry );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ // at query time we use a standard analyzer. We explicitly search for tokens which can only be found if the
+ // right language-specific stemmer was used at index time
+ assertEquals( 1, nbrOfMatchingResults( "description", "aufeinanderschlug", s ) );
+ assertEquals( 1, nbrOfMatchingResults( "description", "acknowledg", s ) );
+ assertEquals( 0, nbrOfMatchingResults( "title", "aufeinanderschlug", s ) );
+ assertEquals( 1, nbrOfMatchingResults( "title", "acknowledgment", s ) );
+
+ for( Object result : s.createQuery( "from " + BlogEntry.class.getName() ).list() ) {
+ s.delete( result );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testDateBridgeMapping() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Address address = new Address();
+ address.setStreet1( "Peachtree Rd NE" );
+ address.setStreet2( "Peachtnot Rd NE" );
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set( 2009, Calendar.NOVEMBER, 15);
+
+ Date date = new Date( c.getTimeInMillis() );
+ address.setDateCreated(date);
+ s.persist( address );
+
+ address = new Address();
+ address.setStreet1( "Peachtnot Rd NE" );
+ address.setStreet2( "Peachtree Rd NE" );
+ address.setDateCreated(date);
+ s.persist( address );
+
+ BlogEntry enEntry = new BlogEntry();
+ enEntry.setTitle( "acknowledgment" );
+ enEntry.setDescription( "acknowledgment" );
+ enEntry.setLanguage( "en" );
+ enEntry.setDateCreated(date);
+ s.persist( enEntry );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "date-created:20091115 OR blog-entry-created:20091115" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 3 results", 3, query.getResultSize() );
+
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testCalendarBridgeMapping() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Address address = new Address();
+ address.setStreet1( "Peachtree Rd NE" );
+ address.setStreet2( "Peachtnot Rd NE" );
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set( 2009, Calendar.NOVEMBER, 15);
+
+ address.setLastUpdated(c);
+ s.persist( address );
+
+ address = new Address();
+ address.setStreet1( "Peachtnot Rd NE" );
+ address.setStreet2( "Peachtree Rd NE" );
+ address.setLastUpdated(c);
+ s.persist( address );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "last-updated:20091115" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 2 results", 2, query.getResultSize() );
+
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testProvidedIdMapping() throws Exception{
+ FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
+ SearchFactoryImplementor sf = (SearchFactoryImplementor) fullTextSession.getSearchFactory();
+
+ ProvidedIdEntry person1 = new ProvidedIdEntry();
+ person1.setName( "Big Goat" );
+ person1.setBlurb( "Eats grass" );
+
+ ProvidedIdEntry person2 = new ProvidedIdEntry();
+ person2.setName( "Mini Goat" );
+ person2.setBlurb( "Eats cheese" );
+
+ ProvidedIdEntry person3 = new ProvidedIdEntry();
+ person3.setName( "Regular goat" );
+ person3.setBlurb( "Is anorexic" );
+
+ ManualTransactionContext tc = new ManualTransactionContext();
+
+ Work<ProvidedIdEntry> work = new Work<ProvidedIdEntry>( person1, 1, WorkType.INDEX );
+ sf.getWorker().performWork( work, tc );
+ work = new Work<ProvidedIdEntry>( person2, 2, WorkType.INDEX );
+ sf.getWorker().performWork( work, tc );
+ Work<ProvidedIdEntry> work2 = new Work<ProvidedIdEntry>( person3, 3, WorkType.INDEX );
+ sf.getWorker().performWork( work2, tc );
+
+ tc.end();
+
+ Transaction transaction = fullTextSession.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "providedidentry.name", SearchTestCase.standardAnalyzer );
+ Query luceneQuery = parser.parse( "Goat" );
+
+ //we cannot use a FullTextQuery because @ProvidedId does not expose an id getter, which the Hibernate Search query extension
+ //needs, so we use plain Lucene instead
+
+ //we know there is only one DP
+ DirectoryProvider<?> provider = fullTextSession.getSearchFactory()
+ .getDirectoryProviders( ProvidedIdEntry.class )[0];
+ IndexSearcher searcher = new IndexSearcher( provider.getDirectory(), true );
+ TopDocs hits = searcher.search( luceneQuery, 1000 );
+ searcher.close();
+ transaction.commit();
+ session.close();
+
+ assertEquals( 3, hits.totalHits );
+ }
+
+ public void testFullTextFilterDefAtMappingLevel() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Address address = new Address();
+ address.setStreet1( "Peachtree Rd NE" );
+ address.setStreet2( "Peachtnot Rd NE" );
+ address.setOwner("test");
+ Calendar c = GregorianCalendar.getInstance();
+ c.setTimeZone( TimeZone.getTimeZone( "GMT" ) ); //for the sake of tests
+ c.set( 2009, Calendar.NOVEMBER, 15);
+
+ address.setLastUpdated(c);
+ s.persist( address );
+
+ address = new Address();
+ address.setStreet1( "Peachtnot Rd NE" );
+ address.setStreet2( "Peachtree Rd NE" );
+ address.setLastUpdated(c);
+ address.setOwner("test2");
+ s.persist( address );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "street1:Peachtnot" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ query.enableFullTextFilter("security").setParameter("ownerName", "test");
+ assertEquals( "expecting 1 results", 1, query.getResultSize() );
+
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testIndexEmbedded() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ ProductCatalog productCatalog = new ProductCatalog();
+ productCatalog.setName("Cars");
+ Item item = new Item();
+ item.setDescription("Ferrari");
+ item.setProductCatalog(productCatalog);
+ productCatalog.addItem(item);
+
+ s.persist(item);
+ s.persist(productCatalog);
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "items.description:Ferrari" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 1 results", 1, query.getResultSize() );
+
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testContainedIn() throws Exception{
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ ProductCatalog productCatalog = new ProductCatalog();
+ productCatalog.setName("Cars");
+ Item item = new Item();
+ item.setDescription("test");
+ item.setProductCatalog(productCatalog);
+ productCatalog.addItem(item);
+
+ s.persist(item);
+ s.persist(productCatalog);
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( "items.description:test" );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 1 results", 1, query.getResultSize() );
+ tx.commit();
+
+ tx = s.beginTransaction();
+
+ Item loaded = (Item) s.get(Item.class, item.getId());
+ loaded.setDescription("Ferrari");
+
+ s.update(loaded);
+ tx.commit();
+
+
+ tx = s.beginTransaction();
+
+ parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ luceneQuery = parser.parse( "items.description:test" );
+ query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 0 results", 0, query.getResultSize() );
+
+ parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ luceneQuery = parser.parse( "items.description:Ferrari" );
+ query = s.createFullTextQuery( luceneQuery ).setProjection( FullTextQuery.THIS, FullTextQuery.SCORE );
+ assertEquals( "expecting 1 results", 1, query.getResultSize() );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ @SuppressWarnings( "unchecked" )
+ List<Object[]> results = query.list();
+
+ for( Object[] result : results ) {
+ s.delete( result[0] );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testClassBridgeMapping() throws Exception {
+ org.hibernate.Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( getDepts1() );
+ s.persist( getDepts2() );
+ s.persist( getDepts3() );
+ s.persist( getDepts4() );
+ s.flush();
+ tx.commit();
+
+ tx = s.beginTransaction();
+ FullTextSession session = Search.getFullTextSession( s );
+
+ // The equipment field is the manufacturer field in the
+ // Departments entity after being massaged by passing it
+ // through the EquipmentType class. This field is in
+ // the Lucene document but not in the Departments entity itself.
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "equipment", new SimpleAnalyzer() );
+
+ // Check the second ClassBridge annotation
+ Query query = parser.parse( "equiptype:Cisco" );
+ org.hibernate.search.FullTextQuery hibQuery = session.createFullTextQuery( query, Departments.class );
+ List<Departments> result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect number of results returned", 2, result.size() );
+ for (Departments d : result) {
+ assertEquals("incorrect manufacturer", "C", d.getManufacturer());
+ }
+
+ // No data cross-ups.
+ query = parser.parse( "branchnetwork:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "problem with field cross-ups", result.size() == 0 );
+
+ // Non-ClassBridge field.
+ parser = new QueryParser( getTargetLuceneVersion(), "branchHead", new SimpleAnalyzer() );
+ query = parser.parse( "branchHead:Kent Lewin" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertTrue( "incorrect entity returned, wrong branch head", result.size() == 1 );
+ assertEquals("incorrect entity returned", "Kent Lewin", ( result.get( 0 ) ).getBranchHead());
+
+ // Check other ClassBridge annotation.
+ parser = new QueryParser( getTargetLuceneVersion(), "branchnetwork", new SimpleAnalyzer() );
+ query = parser.parse( "branchnetwork:st. george 1D" );
+ hibQuery = session.createFullTextQuery( query, Departments.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "incorrect entity returned, wrong network", "1D", ( result.get( 0 ) ).getNetwork() );
+ assertEquals( "incorrect entity returned, wrong branch", "St. George", ( result.get( 0 ) ).getBranch() );
+ assertEquals( "incorrect number of results returned", 1, result.size() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Departments.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testDynamicBoosts() throws Exception {
+
+ Session session = openSession();
+ session.beginTransaction();
+
+ DynamicBoostedDescLibrary lib1 = new DynamicBoostedDescLibrary();
+ lib1.setName( "one" );
+ session.persist( lib1 );
+
+ DynamicBoostedDescLibrary lib2 = new DynamicBoostedDescLibrary();
+ lib2.setName( "two" );
+ session.persist( lib2 );
+
+ session.getTransaction().commit();
+ session.close();
+
+ float lib1Score = getScore( new TermQuery( new Term( "name", "one" ) ) );
+ float lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertEquals( "The scores should be equal", lib1Score, lib2Score );
+
+ // set dynamic score and reindex!
+ session = openSession();
+ session.beginTransaction();
+
+ session.refresh( lib2 );
+ lib2.setDynScore( 2.0f );
+
+ session.getTransaction().commit();
+ session.close();
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "one" ) ) );
+ lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertTrue( "lib2score should be greater than lib1score", lib1Score < lib2Score );
+
+
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "foobar" ) ) );
+ assertEquals( "lib1score should be 0 since term is not yet indexed.", 0.0f, lib1Score );
+
+ // index foobar
+ session = openSession();
+ session.beginTransaction();
+
+ session.refresh( lib1 );
+ lib1.setName( "foobar" );
+
+ session.getTransaction().commit();
+ session.close();
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "foobar" ) ) );
+ lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertTrue( "lib1score should be greater than lib2score", lib1Score > lib2Score );
+ }
+
+ private float getScore(Query query) {
+ Session session = openSession();
+ Object[] queryResult;
+ float score;
+ try {
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
+ List<?> resultList = fullTextSession
+ .createFullTextQuery( query, DynamicBoostedDescLibrary.class )
+ .setProjection( ProjectionConstants.SCORE, ProjectionConstants.EXPLANATION )
+ .setMaxResults( 1 )
+ .list();
+
+ if ( resultList.size() == 0 ) {
+ score = 0.0f;
+ }
+ else {
+ queryResult = ( Object[] ) resultList.get( 0 );
+ score = ( Float ) queryResult[0];
+ String explanation = queryResult[1].toString();
+ log.debug( "score: " + score + " explanation: " + explanation );
+ }
+ }
+ finally {
+ session.close();
+ }
+ return score;
+ }
+
+ private int nbrOfMatchingResults(String field, String token, FullTextSession s) throws ParseException {
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), field, SearchTestCase.standardAnalyzer );
+ org.apache.lucene.search.Query luceneQuery = parser.parse( token );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ return query.getResultSize();
+ }
+
+
+ private Departments getDepts1() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "Salt Lake City" );
+ depts.setBranchHead( "Kent Lewin" );
+ depts.setMaxEmployees( 100 );
+ depts.setNetwork( "1A" );
+ depts.setManufacturer( "C" );
+
+ return depts;
+ }
+
+ private Departments getDepts2() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "Layton" );
+ depts.setBranchHead( "Terry Poperszky" );
+ depts.setMaxEmployees( 20 );
+ depts.setNetwork( "2B" );
+ depts.setManufacturer( "3" );
+
+ return depts;
+ }
+
+ private Departments getDepts3() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "West Valley" );
+ depts.setBranchHead( "Pat Kelley" );
+ depts.setMaxEmployees( 15 );
+ depts.setNetwork( "3C" );
+ depts.setManufacturer( "D" );
+
+ return depts;
+ }
+
+ private Departments getDepts4() {
+ Departments depts = new Departments();
+
+ depts.setBranch( "St. George" );
+ depts.setBranchHead( "Spencer Stajskal" );
+ depts.setMaxEmployees( 10 );
+ depts.setNetwork( "1D" );
+ depts.setManufacturer( "C" );
+ return depts;
+ }
+
+
+ @Override
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.getProperties().put( Environment.MODEL_MAPPING, ProgrammaticSearchMappingFactory.class.getName() );
+ }
+
+ public void NotUseddefineMapping() {
+ SearchMapping mapping = new SearchMapping();
+ mapping.analyzerDef( "stem", StandardTokenizerFactory.class )
+ .tokenizerParam( "name", "value" )
+ .tokenizerParam( "name2", "value2" )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( SnowballPorterFilterFactory.class)
+ .param("language", "English")
+ .analyzerDef( "ngram", StandardTokenizerFactory.class )
+ .tokenizerParam( "name", "value" )
+ .tokenizerParam( "name2", "value2" )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( NGramFilterFactory.class)
+ .param("minGramSize", "3")
+ .param("maxGramSize", "3")
+ .entity(Address.class).indexed().indexName("Address_Index")
+ .property("street1", ElementType.FIELD)
+ .field()
+ .field()
+ .name("street1_iso")
+ .store( Store.YES )
+ .index( Index.TOKENIZED )
+ .analyzer( ISOLatin1Analyzer.class)
+ .field()
+ .name("street1_ngram")
+ .analyzer("ngram")
+ .entity(User.class).indexed()
+ .property("name", ElementType.METHOD)
+ .field()
+ .analyzerDef( "minimal", StandardTokenizerFactory.class );
+
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class<?>[] {
+ Address.class,
+ Country.class,
+ BlogEntry.class,
+ ProvidedIdEntry.class,
+ ProductCatalog.class,
+ Item.class,
+ Departments.class,
+ DynamicBoostedDescLibrary.class
+
+ };
+ }
+
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticMappingTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
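
The programmatic mapping above is activated in configure(), which puts the factory class name under Environment.MODEL_MAPPING. A minimal standalone sketch of the same wiring, assuming "hibernate.search.model_mapping" is the key behind Environment.MODEL_MAPPING and reusing the entities listed in getMappings(); the sketch is not part of this commit:

package org.hibernate.search.test.configuration;

import org.hibernate.SessionFactory;
import org.hibernate.cfg.AnnotationConfiguration;

// Minimal bootstrap sketch (assumption: "hibernate.search.model_mapping" is the value of
// org.hibernate.search.Environment.MODEL_MAPPING); mirrors what configure()/getMappings() do.
public class ProgrammaticMappingBootstrapSketch {

    public static SessionFactory build() {
        AnnotationConfiguration cfg = new AnnotationConfiguration();
        // point Hibernate Search at the @Factory-annotated mapping factory
        cfg.setProperty( "hibernate.search.model_mapping",
                ProgrammaticSearchMappingFactory.class.getName() );
        // same entity set as ProgrammaticMappingTest.getMappings()
        cfg.addAnnotatedClass( Address.class );
        cfg.addAnnotatedClass( Country.class );
        cfg.addAnnotatedClass( BlogEntry.class );
        cfg.addAnnotatedClass( ProvidedIdEntry.class );
        cfg.addAnnotatedClass( ProductCatalog.class );
        cfg.addAnnotatedClass( Item.class );
        cfg.addAnnotatedClass( Departments.class );
        cfg.addAnnotatedClass( DynamicBoostedDescLibrary.class );
        return cfg.buildSessionFactory();
    }
}
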
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticSearchMappingFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticSearchMappingFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticSearchMappingFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,155 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.lang.annotation.ElementType;
+
+import org.apache.lucene.search.DefaultSimilarity;
+import org.apache.solr.analysis.EnglishPorterFilterFactory;
+import org.apache.solr.analysis.GermanStemFilterFactory;
+import org.apache.solr.analysis.LowerCaseFilterFactory;
+import org.apache.solr.analysis.NGramFilterFactory;
+import org.apache.solr.analysis.StandardTokenizerFactory;
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.annotations.FilterCacheModeType;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.bridge.builtin.LongBridge;
+import org.hibernate.search.cfg.ConcatStringBridge;
+import org.hibernate.search.cfg.SearchMapping;
+
+public class ProgrammaticSearchMappingFactory {
+
+ @Factory
+ public SearchMapping build() {
+ SearchMapping mapping = new SearchMapping();
+
+ mapping.fullTextFilterDef("security", SecurityFilterFactory.class).cache(FilterCacheModeType.INSTANCE_ONLY)
+ .analyzerDef( "ngram", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( NGramFilterFactory.class )
+ .param( "minGramSize", "3" )
+ .param( "maxGramSize", "3" )
+ .analyzerDef( "en", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( EnglishPorterFilterFactory.class )
+ .analyzerDef( "de", StandardTokenizerFactory.class )
+ .filter( LowerCaseFilterFactory.class )
+ .filter( GermanStemFilterFactory.class )
+ .entity( Address.class )
+ .indexed()
+ .similarity( DefaultSimilarity.class )
+ .boost( 2 )
+ .property( "addressId", ElementType.FIELD ).documentId().name( "id" )
+ .property("lastUpdated", ElementType.FIELD)
+ .field().name("last-updated")
+ .analyzer("en").store(Store.YES)
+ .calendarBridge(Resolution.DAY)
+ .property("dateCreated", ElementType.FIELD)
+ .field().name("date-created").index(Index.TOKENIZED)
+ .analyzer("en").store( Store.YES )
+ .dateBridge(Resolution.DAY)
+ .property("owner", ElementType.FIELD)
+ .field()
+ .property( "street1", ElementType.FIELD )
+ .field()
+ .field().name( "street1_ngram" ).analyzer( "ngram" )
+ .field()
+ .name( "street1_abridged" )
+ .bridge( ConcatStringBridge.class ).param( ConcatStringBridge.SIZE, "4" )
+ .property( "street2", ElementType.METHOD )
+ .field().name( "idx_street2" ).store( Store.YES ).boost( 2 )
+ .entity(ProvidedIdEntry.class).indexed()
+ .providedId().name("providedidentry").bridge(LongBridge.class)
+ .property("name", ElementType.FIELD)
+ .field().name("providedidentry.name").analyzer("en").index(Index.TOKENIZED).store(Store.YES)
+ .property("blurb", ElementType.FIELD)
+ .field().name("providedidentry.blurb").analyzer("en").index(Index.TOKENIZED).store(Store.YES)
+ .property("age", ElementType.FIELD)
+ .field().name("providedidentry.age").analyzer("en").index(Index.TOKENIZED).store(Store.YES)
+ .entity(ProductCatalog.class).indexed()
+ .similarity( DefaultSimilarity.class )
+ .boost( 2 )
+ .property( "id", ElementType.FIELD ).documentId().name( "id" )
+ .property("name", ElementType.FIELD)
+ .field().name("productCatalogName").index(Index.TOKENIZED).analyzer("en").store(Store.YES)
+ .property("items", ElementType.FIELD)
+ .indexEmbedded()
+ .entity(Item.class)
+ .property("description", ElementType.FIELD)
+ .field().name("description").analyzer("en").index(Index.TOKENIZED).store(Store.YES)
+ .property("productCatalog", ElementType.FIELD)
+ .containedIn()
+ .entity(DynamicBoostedDescLibrary.class)
+ .dynamicBoost(CustomBoostStrategy.class)
+ .indexed()
+ .property("libraryId", ElementType.FIELD)
+ .documentId().name("id")
+ .property("name", ElementType.FIELD)
+ .dynamicBoost(CustomFieldBoostStrategy.class)
+ .field().store(Store.YES)
+ .entity(Departments.class)
+ .classBridge(CatDeptsFieldsClassBridge.class)
+ .name("branchnetwork")
+ .index(Index.TOKENIZED)
+ .store(Store.YES)
+ .param("sepChar", " ")
+ .classBridge(EquipmentType.class)
+ .name("equiptype")
+ .index(Index.TOKENIZED)
+ .store(Store.YES)
+ .param("C", "Cisco")
+ .param("D", "D-Link")
+ .param("K", "Kingston")
+ .param("3", "3Com")
+ .indexed()
+ .property("deptsId", ElementType.FIELD)
+ .documentId().name("id")
+ .property("branchHead", ElementType.FIELD)
+ .field().store(Store.YES)
+ .property("network", ElementType.FIELD)
+ .field().store(Store.YES)
+ .property("branch", ElementType.FIELD)
+ .field().store(Store.YES)
+ .property("maxEmployees", ElementType.FIELD)
+ .field().index(Index.UN_TOKENIZED).store(Store.YES)
+ .entity( BlogEntry.class ).indexed()
+ .property( "title", ElementType.METHOD )
+ .field()
+ .property( "description", ElementType.METHOD )
+ .field()
+ .property( "language", ElementType.METHOD )
+ .analyzerDiscriminator(BlogEntry.BlogLangDiscriminator.class)
+ .property("dateCreated", ElementType.METHOD)
+ .field()
+ .name("blog-entry-created")
+ .analyzer("en")
+ .store(Store.YES)
+ .dateBridge(Resolution.DAY);
+ return mapping;
+
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProgrammaticSearchMappingFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
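
The fluent calls above mirror the annotation API. As a point of reference, a rough annotation-based equivalent of the Address documentId/street2 slice follows; it is illustrative only, since this commit deliberately keeps the mapped entities free of Search annotations:

package org.hibernate.search.test.configuration;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.search.annotations.Boost;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;

// Illustrative sketch only, not part of this commit: roughly the annotation equivalent of
// .entity( Address.class ).indexed().boost( 2 )
//     .property( "addressId", ElementType.FIELD ).documentId().name( "id" )
//     .property( "street2", ElementType.METHOD )
//         .field().name( "idx_street2" ).store( Store.YES ).boost( 2 )
@Entity
@Indexed
@Boost(2)
public class AddressAnnotationSketch {

    @Id
    @DocumentId(name = "id")
    private Long addressId;

    private String street2;

    @Field(name = "idx_street2", store = Store.YES, boost = @Boost(2))
    public String getStreet2() {
        return street2;
    }

    public void setStreet2(String street2) {
        this.street2 = street2;
    }
}
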
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProvidedIdEntry.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProvidedIdEntry.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProvidedIdEntry.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.io.Serializable;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+@Entity
+public class ProvidedIdEntry implements Serializable {
+
+ private static final long serialVersionUID = 6756516458812576484L;
+
+ @Id
+ @GeneratedValue
+ private long id;
+
+ private String name;
+ private String blurb;
+ private int age;
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getBlurb() {
+ return blurb;
+ }
+
+ public void setBlurb(String blurb) {
+ this.blurb = blurb;
+ }
+
+ public int getAge() {
+ return age;
+ }
+
+ public void setAge(int age) {
+ this.age = age;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ProvidedIdEntry.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/SecurityFilterFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/SecurityFilterFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/SecurityFilterFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.search.CachingWrapperFilter;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.util.OpenBitSet;
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.annotations.Key;
+import org.hibernate.search.filter.FilterKey;
+import org.hibernate.search.filter.StandardFilterKey;
+
+public class SecurityFilterFactory {
+
+ private static final long serialVersionUID = -19238668272676998L;
+
+ private String ownerName;
+
+ public void setOwnerName(String ownerName) {
+ this.ownerName = ownerName;
+ }
+
+ @Factory
+ public Filter buildSecurityFilter() {
+ SecurityFilter securityFilter = new SecurityFilter(ownerName);
+ return new CachingWrapperFilter(securityFilter);
+ }
+
+ @Key
+ public FilterKey getKey() {
+ StandardFilterKey key = new StandardFilterKey();
+ key.addParameter(ownerName);
+ return key;
+ }
+
+ private static final class SecurityFilter extends Filter {
+ private static final long serialVersionUID = -5105989141875576599L;
+ private final String ownerName;
+
+ private SecurityFilter(final String ownerName) {
+ this.ownerName = ownerName;
+ }
+
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );
+ TermDocs termDocs = reader.termDocs( new Term( "owner", ownerName ) );
+ while ( termDocs.next() ) {
+ bitSet.set( termDocs.doc() );
+ }
+ return bitSet;
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/SecurityFilterFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
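
At query time the filter is referenced by the name given in mapping.fullTextFilterDef(...), exactly as testFullTextFilterDefAtMappingLevel does above; setParameter( "ownerName", ... ) is injected into SecurityFilterFactory before the @Factory method builds the Lucene filter. A condensed usage sketch, not part of this commit:

package org.hibernate.search.test.configuration;

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.hibernate.Session;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

// Condensed usage sketch: enabling the "security" filter defined in
// ProgrammaticSearchMappingFactory. The Session is assumed to come from a
// SessionFactory built with that programmatic mapping.
public class SecurityFilterUsageSketch {

    public List<?> addressesOwnedBy(Session session, String owner) {
        FullTextSession fts = Search.getFullTextSession( session );
        // "street1" is analyzed with the "en" analyzer, so the term is lowercased
        TermQuery luceneQuery = new TermQuery( new Term( "street1", "peachtnot" ) );
        FullTextQuery query = fts.createFullTextQuery( luceneQuery, Address.class );
        query.enableFullTextFilter( "security" ).setParameter( "ownerName", owner );
        return query.list();
    }
}
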
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ShardsConfigurationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ShardsConfigurationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,123 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_BUFFERED_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_MERGE_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MERGE_FACTOR;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.RAM_BUFFER_SIZE;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.TRANSACTION;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.BATCH;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.Document;
+import org.hibernate.search.test.query.Author;
+import org.hibernate.search.test.query.Book;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class ShardsConfigurationTest extends ConfigurationReadTestCase {
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ //super contains these:
+// cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
+// cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
+ cfg.setProperty( "hibernate.search.default.sharding_strategy.nbr_of_shards", "2" );// permit this?
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.default.2.directory_provider", RAMDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_buffered_docs", "4" );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_merge_docs", "5" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_buffered_docs", "6" );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy.nbr_of_shards", "4" );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy", UselessShardingStrategy.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy.test.system.default", "45" );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy.test.output", "70" );
+ cfg.setProperty( "hibernate.search.Documents.0.batch.max_merge_docs", "57" );
+ cfg.setProperty( "hibernate.search.Documents.0.directory_provider", RAMDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.0.transaction.max_buffered_docs", "58" );
+ cfg.setProperty( "hibernate.search.Documents.1.batch.max_merge_docs", "11" );
+ cfg.setProperty( "hibernate.search.Documents.1.transaction.max_buffered_docs", "12" );
+ cfg.setProperty( "hibernate.search.Documents.1.transaction.term_index_interval", "12" );
+ }
+
+ public void testCorrectNumberOfShardsDetected() {
+ DirectoryProvider[] docDirProviders = getSearchFactory()
+ .getDirectoryProviders( Document.class );
+ assertNotNull( docDirProviders);
+ assertEquals( 4, docDirProviders.length );
+ DirectoryProvider[] bookDirProviders = getSearchFactory()
+ .getDirectoryProviders( Book.class );
+ assertNotNull( bookDirProviders );
+ assertEquals( 2, bookDirProviders.length );
+ }
+
+ public void testSelectionOfShardingStrategy() {
+ IndexShardingStrategy shardingStrategy = getSearchFactory().getDocumentBuilderIndexedEntity( Document.class )
+ .getDirectoryProviderSelectionStrategy();
+ assertNotNull( shardingStrategy );
+ assertEquals( shardingStrategy.getClass(), UselessShardingStrategy.class );
+ }
+
+ public void testShardingSettingsInherited() {
+ DirectoryProvider[] docDirProviders = getSearchFactory().getDirectoryProviders( Document.class );
+ assertTrue( docDirProviders[0] instanceof RAMDirectoryProvider );
+ assertTrue( docDirProviders[1] instanceof FSDirectoryProvider );
+ assertTrue( docDirProviders[2] instanceof RAMDirectoryProvider );
+ assertValueIsSet( Document.class, 0, BATCH, MAX_BUFFERED_DOCS, 4 );
+ }
+
+ public void testShardN2UsesDefaults() {
+ assertValueIsSet( Document.class, 2, TRANSACTION, MAX_BUFFERED_DOCS, 6 );
+ assertValueIsDefault( Document.class, 2, TRANSACTION, MAX_MERGE_DOCS );
+ assertValueIsSet( Document.class, 2, TRANSACTION, MERGE_FACTOR, 100 );
+ assertValueIsDefault( Document.class, 2, TRANSACTION, RAM_BUFFER_SIZE );
+ assertValueIsSet( Document.class, 2, BATCH, MAX_BUFFERED_DOCS, 4 );
+ assertValueIsSet( Document.class, 2, BATCH, MAX_MERGE_DOCS, 5 );
+ assertValueIsDefault( Document.class, 2, BATCH, MERGE_FACTOR );
+ assertValueIsDefault( Document.class, 2, BATCH, RAM_BUFFER_SIZE );
+ }
+
+ public void testShardN1_ExplicitParams() {
+ assertValueIsSet( Document.class, 1, TRANSACTION, MAX_BUFFERED_DOCS, 12 );
+ assertValueIsSet( Document.class, 1, BATCH, MAX_MERGE_DOCS, 11 );
+ }
+
+ @Override
+ protected void ensureIndexesAreEmpty() {
+ // skips index emptying to prevent a problem with UselessShardingStrategy
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Author.class,
+ Document.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
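
The configuration read back by ShardsConfigurationTest follows a most-specific-wins layering: shard-level keys override index-level keys, which override hibernate.search.default.* (and hibernate.search.default.<n>.* applies per shard across indexes, as shard 2 shows). A condensed restatement of the configure() block, not part of this commit:

package org.hibernate.search.test.configuration;

import org.hibernate.cfg.Configuration;
import org.hibernate.search.store.FSDirectoryProvider;
import org.hibernate.search.store.RAMDirectoryProvider;

// Condensed restatement of the property layering exercised by ShardsConfigurationTest;
// most specific key wins: shard level > index level > hibernate.search.default.*
public class ShardPropertyLayeringSketch {

    public static void apply(Configuration cfg) {
        cfg.setProperty( "hibernate.search.default.directory_provider",          // global default
                FSDirectoryProvider.class.getCanonicalName() );
        cfg.setProperty( "hibernate.search.Documents.sharding_strategy.nbr_of_shards", "4" ); // index level
        cfg.setProperty( "hibernate.search.Documents.batch.max_buffered_docs", "4" );         // index level
        cfg.setProperty( "hibernate.search.Documents.0.directory_provider",      // shard 0 only
                RAMDirectoryProvider.class.getCanonicalName() );
        // Shards with no shard-specific key fall back to the Documents.* values, and to
        // hibernate.search.default.* (or default.<n>.*) where no index-level value exists.
    }
}
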
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/UselessShardingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/UselessShardingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/UselessShardingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+import java.io.Serializable;
+import java.util.Properties;
+import java.util.Enumeration;
+
+import org.apache.lucene.document.Document;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+
+/**
+ * Used to test the configuration of a third-party strategy
+ * @author Sanne Grinovero
+ */
+public class UselessShardingStrategy implements IndexShardingStrategy {
+
+ public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
+ return null;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+ return null;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
+ return null;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] fullTextFilters) {
+ return null;
+ }
+
+ public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+ Enumeration<?> propertyNames = properties.propertyNames();
+ int counter;
+ counter = checkEnumeration( propertyNames );
+ if (counter != 2) throw new IllegalStateException( "propertyNames() fails" );
+ counter = checkEnumeration( properties.keys() );
+ if (counter != 2) throw new IllegalStateException( "keys() fails" );
+ counter = 0;
+ for (Object key : properties.keySet() ) {
+ if ( ! String.class.isInstance( key ) ) continue;
+ if ( String.class.cast( key ).startsWith("test.") ) {
+ System.out.println( key );
+ counter++;
+ }
+ }
+ if (counter != 2) throw new IllegalStateException( "keySet() fails" );
+ }
+
+ private int checkEnumeration(Enumeration<?> propertyNames) {
+ int counter = 0;
+ while ( propertyNames.hasMoreElements() ) {
+ Object key = propertyNames.nextElement();
+ if ( ! String.class.isInstance( key ) ) continue;
+ String propertyName = (String) key;
+ if ( propertyName.startsWith("test.") ) {
+ counter++;
+ }
+ }
+ return counter;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/UselessShardingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/User.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/User.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/User.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.configuration;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class User {
+ private String name;
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/configuration/User.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockFactoryFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockFactoryFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockFactoryFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import java.io.File;
+import java.util.Properties;
+
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.SingleInstanceLockFactory;
+import org.hibernate.search.store.LockFactoryFactory;
+
+public class CustomLockFactoryFactory implements LockFactoryFactory {
+
+ // A real implementation would probably not use a static field; useful to keep the test simple.
+ static String optionValue;
+
+ public LockFactory createLockFactory(File indexDir, Properties dirConfiguration) {
+ optionValue = dirConfiguration.getProperty( "locking_option" );
+ return new SingleInstanceLockFactory();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockFactoryFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockProviderTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockProviderTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockProviderTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,69 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import junit.framework.TestCase;
+
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class CustomLockProviderTest extends TestCase {
+
+ public void testUseOfCustomLockingFactory() {
+ assertNull( CustomLockFactoryFactory.optionValue );
+ FullTextSessionBuilder builder = new FullTextSessionBuilder();
+ builder
+ .addAnnotatedClass( SnowStorm.class )
+ .setProperty( "hibernate.search.default.locking_option", "somethingHere" )
+ .setProperty( "hibernate.search.default.locking_strategy", "org.hibernate.search.test.directoryProvider.CustomLockFactoryFactory")
+ .build();
+ builder.close();
+ assertEquals( "somethingHere", CustomLockFactoryFactory.optionValue );
+ }
+
+ public void testFailOnInexistentLockingFactory() {
+ FullTextSessionBuilder builder = new FullTextSessionBuilder();
+ try {
+ builder
+ .addAnnotatedClass( SnowStorm.class )
+ .setProperty( "hibernate.search.default.locking_option", "somethingHere" )
+ .setProperty( "hibernate.search.default.locking_strategy", "org.hibernate.NotExistingFactory")
+ .build();
+ builder.close();
+ fail();
+ }
+ catch (org.hibernate.HibernateException e) {
+ Throwable causeSearch = e.getCause();
+ assertNotNull( causeSearch );
+ assertTrue( causeSearch instanceof org.hibernate.search.SearchException );
+ Throwable causeLockin = causeSearch.getCause();
+ assertNotNull( causeLockin );
+ assertEquals( "Unable to find locking_strategy implementation class: org.hibernate.NotExistingFactory", causeLockin.getMessage() );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/CustomLockProviderTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/DirectoryProviderHelperTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/DirectoryProviderHelperTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/DirectoryProviderHelperTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,102 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import java.io.File;
+import java.util.Properties;
+import junit.framework.TestCase;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.store.DirectoryProviderHelper;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * @author Gavin King
+ * @author Sanne Grinovero
+ */
+public class DirectoryProviderHelperTest extends TestCase {
+
+ public void testMkdirsDetermineIndex() {
+ String root = "./testDir/dir1/dir2";
+ String relative = "dir3";
+
+ Properties properties = new Properties();
+ properties.put( "indexBase", root );
+ properties.put( "indexName", relative );
+
+ DirectoryProviderHelper.getVerifiedIndexDir( "name", properties, true );
+
+ assertTrue( new File( root ).exists() );
+
+ FileHelper.delete( new File( "./testDir" ) );
+ }
+
+ public void testMkdirsGetSource() {
+ String root = "./testDir";
+ String relative = "dir1/dir2/dir3";
+
+ Properties properties = new Properties();
+ properties.put( "sourceBase", root );
+ properties.put( "source", relative );
+
+ File rel = DirectoryProviderHelper.getSourceDirectory( "name", properties, true );
+
+ assertTrue( rel.exists() );
+
+ FileHelper.delete( new File( root ) );
+ }
+
+ public void testConfiguringCopyBufferSize() {
+ Properties prop = new Properties();
+ long mB = 1024 * 1024;
+
+ //default to FileHelper default:
+ assertEquals( FileHelper.DEFAULT_COPY_BUFFER_SIZE, DirectoryProviderHelper.getCopyBufferSize( "testIdx", prop ) );
+
+ //any value from MegaBytes:
+ prop.setProperty( DirectoryProviderHelper.COPYBUFFERSIZE_PROP_NAME, "4" );
+ assertEquals( 4*mB, DirectoryProviderHelper.getCopyBufferSize( "testIdx", prop ) );
+ prop.setProperty( DirectoryProviderHelper.COPYBUFFERSIZE_PROP_NAME, "1000" );
+ assertEquals( 1000*mB, DirectoryProviderHelper.getCopyBufferSize( "testIdx", prop ) );
+
+ //invalid values
+ prop.setProperty( DirectoryProviderHelper.COPYBUFFERSIZE_PROP_NAME, "0" );
+ boolean testOk = false;
+ try {
+ DirectoryProviderHelper.getCopyBufferSize( "testIdx", prop );
+ } catch (SearchException e){
+ testOk = true;
+ }
+ assertTrue( testOk );
+ prop.setProperty( DirectoryProviderHelper.COPYBUFFERSIZE_PROP_NAME, "-100" );
+ testOk = false;
+ try {
+ DirectoryProviderHelper.getCopyBufferSize( "testIdx", prop );
+ } catch (SearchException e){
+ testOk = true;
+ }
+ assertTrue( testOk );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/DirectoryProviderHelperTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,238 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import java.io.File;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.slf4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.HibernateException;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Test case for master/slave directories.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class FSSlaveAndMasterDPTest extends MultipleSFTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ private static File root;
+
+ static {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
+ buildDir = ".";
+ }
+ root = new File( buildDir, "lucenedirs" );
+ log.info( "Using {} as test directory.", root.getAbsolutePath() );
+ }
+
+ /**
+ * The lucene index directory which is shared between master and slave.
+ */
+ private String masterCopy = "/master/copy";
+
+ /**
+ * The lucene index directory which is specific to the master node.
+ */
+ private String masterMain = "/master/main";
+
+ /**
+ * The lucene index directory which is specific to the slave node.
+ */
+ private String slave = "/slave";
+
+ /**
+ * Verifies that the master index gets properly copied to the slaves.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testProperCopy() throws Exception {
+
+ // assert that the slave index is empty
+ FullTextSession fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ Transaction tx = fullTextSession.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ List result = fullTextSession.createFullTextQuery( parser.parse( "location:texas" ) ).list();
+ assertEquals( "No copy yet, fresh index expected", 0, result.size() );
+ tx.commit();
+ fullTextSession.close();
+
+
+ // create an entity on the master and persist it in order to index it
+ Session session = getMasterSession();
+ tx = session.beginTransaction();
+ SnowStorm sn = new SnowStorm();
+ sn.setDate( new Date() );
+ sn.setLocation( "Dallas, TX, USA" );
+ session.persist( sn );
+ tx.commit();
+ session.close();
+
+ int waitPeriodMilli = 2010; // wait a bit more than 2 refresh periods (one master / one slave) - 2 * 1 * 1000 + 10
+ Thread.sleep( waitPeriodMilli );
+
+ // assert that the master has indexed the snowstorm
+ log.info( "Searching master" );
+ fullTextSession = Search.getFullTextSession( getMasterSession() );
+ tx = fullTextSession.beginTransaction();
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas" ) ).list();
+ assertEquals( "Original should get one", 1, result.size() );
+ tx.commit();
+ fullTextSession.close();
+
+ // assert that the index got copied to the slave as well
+ log.info( "Searching slave" );
+ fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ tx = fullTextSession.beginTransaction();
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas" ) ).list();
+ assertEquals( "First copy did not work out", 1, result.size() );
+ tx.commit();
+ fullTextSession.close();
+
+ // add a new snowstorm to the master
+ session = getMasterSession();
+ tx = session.beginTransaction();
+ sn = new SnowStorm();
+ sn.setDate( new Date() );
+ sn.setLocation( "Chennai, India" );
+ session.persist( sn );
+ tx.commit();
+ session.close();
+
+ Thread.sleep( waitPeriodMilli ); //wait a bit more than 2 refresh periods (one master / one slave)
+
+ // assert that the new snowstorm made it into the slave
+ log.info( "Searching slave" );
+ fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ tx = fullTextSession.beginTransaction();
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:chennai" ) ).list();
+ assertEquals( "Second copy did not work out", 1, result.size() );
+ tx.commit();
+ fullTextSession.close();
+
+ session = getMasterSession();
+ tx = session.beginTransaction();
+ sn = new SnowStorm();
+ sn.setDate( new Date() );
+ sn.setLocation( "Melbourne, Australia" );
+ session.persist( sn );
+ tx.commit();
+ session.close();
+
+ Thread.sleep( waitPeriodMilli ); //wait a bit more than 2 refresh periods (one master / one slave)
+
+ // once more - assert that the new snowstorm made it into the slave
+ log.info( "Searching slave" );
+ fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ tx = fullTextSession.beginTransaction();
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:melbourne" ) ).list();
+ assertEquals( "Third copy did not work out", 1, result.size() );
+ tx.commit();
+ fullTextSession.close();
+ }
+
+ private Session getMasterSession() {
+ return getSessionFactories()[0].openSession();
+ }
+
+ private Session getSlaveSession() {
+ return getSessionFactories()[1].openSession();
+ }
+
+ protected void setUp() throws Exception {
+
+ if ( root.exists() ) {
+ FileHelper.delete( root );
+ }
+
+ if ( !root.mkdir() ) {
+ throw new HibernateException( "Unable to setup test directories" );
+ }
+
+ File master = new File( root, masterMain );
+ if ( !master.mkdirs() ) {
+ throw new HibernateException( "Unable to setup master directory" );
+ }
+
+ master = new File( root, masterCopy );
+ if ( !master.mkdirs() ) {
+ throw new HibernateException( "Unable to setup master copy directory" );
+ }
+
+ File slaveFile = new File( root, slave );
+ if ( !slaveFile.mkdirs() ) {
+ throw new HibernateException( "Unable to setup slave directory" );
+ }
+ super.setUp();
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ log.info( "Deleting test directory {} ", root.getAbsolutePath() );
+ FileHelper.delete( root );
+ }
+
+ protected int getSFNbrs() {
+ return 2;
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ SnowStorm.class
+ };
+ }
+
+ protected void configure(Configuration[] cfg) {
+ //master
+ cfg[0].setProperty( "hibernate.search.default.sourceBase", root.getAbsolutePath() + masterCopy );
+ cfg[0].setProperty( "hibernate.search.default.indexBase", root.getAbsolutePath() + masterMain );
+ cfg[0].setProperty( "hibernate.search.default.refresh", "1" ); //every second
+ cfg[0].setProperty(
+ "hibernate.search.default.directory_provider", "org.hibernate.search.store.FSMasterDirectoryProvider"
+ );
+
+ //slave(s)
+ cfg[1].setProperty( "hibernate.search.default.sourceBase", root.getAbsolutePath() + masterCopy );
+ cfg[1].setProperty( "hibernate.search.default.indexBase", root.getAbsolutePath() + slave );
+ cfg[1].setProperty( "hibernate.search.default.refresh", "1" ); //every second
+ cfg[1].setProperty(
+ "hibernate.search.default.directory_provider", "org.hibernate.search.store.FSSlaveDirectoryProvider"
+ );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,140 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import java.io.InputStream;
+
+import junit.framework.TestCase;
+
+import org.apache.lucene.util.Version;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.cfg.Environment;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Build multiple session factories from the same set of classes
+ * The configuration can be altered overriding {@link #configure}.
+ *
+ * @author Emmanuel Bernard
+ */
+public abstract class MultipleSFTestCase extends TestCase {
+
+ private static SessionFactory[] sessionFactories;
+ private static AnnotationConfiguration[] cfgs;
+ private static Dialect dialect;
+ private static Class lastTestClass;
+
+ protected abstract int getSFNbrs();
+
+ protected void buildSessionFactories(Class[] classes, String[] packages, String[] xmlFiles) throws Exception {
+ if ( sessionFactories == null ) {
+ sessionFactories = new SessionFactory[getSFNbrs()];
+ }
+ if ( cfgs == null ) {
+ cfgs = new AnnotationConfiguration[getSFNbrs()];
+ }
+ for ( SessionFactory sf : sessionFactories ) {
+ if ( sf != null ) {
+ sf.close();
+ }
+ }
+ for ( int sfIndex = 0; sfIndex < getSFNbrs(); sfIndex++ ) {
+ cfgs[sfIndex] = new AnnotationConfiguration();
+ }
+ configure( cfgs );
+ for ( int sfIndex = 0; sfIndex < getSFNbrs(); sfIndex++ ) {
+ try {
+ if ( recreateSchema() ) {
+ cfgs[sfIndex].setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
+ }
+ for ( String aPackage : packages ) {
+ cfgs[sfIndex].addPackage( aPackage );
+ }
+ for ( Class aClass : classes ) {
+ cfgs[sfIndex].addAnnotatedClass( aClass );
+ }
+ for ( String xmlFile : xmlFiles ) {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFile );
+ cfgs[sfIndex].addInputStream( is );
+ }
+ setDialect( Dialect.getDialect() );
+ sessionFactories[sfIndex] = cfgs[sfIndex].buildSessionFactory( /*new TestInterceptor()*/ );
+ }
+ catch ( Exception e ) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+ }
+
+ protected void setUp() throws Exception {
+ if ( sessionFactories == null || sessionFactories[0] == null || lastTestClass != getClass() ) {
+ buildSessionFactories( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ lastTestClass = getClass();
+ }
+ }
+
+ protected void tearDown() throws Exception {
+ for ( SessionFactory sf : getSessionFactories() ) {
+ sf.close();
+ }
+ }
+
+ protected abstract Class[] getMappings();
+
+ protected String[] getAnnotatedPackages() {
+ return new String[] { };
+ }
+
+ protected String[] getXmlFiles() {
+ return new String[] { };
+ }
+
+ private void setDialect(Dialect dialect) {
+ MultipleSFTestCase.dialect = dialect;
+ }
+
+ protected Dialect getDialect() {
+ return dialect;
+ }
+
+ protected abstract void configure(Configuration[] cfg);
+
+ protected boolean recreateSchema() {
+ return true;
+ }
+
+ public static SessionFactory[] getSessionFactories() {
+ return sessionFactories;
+ }
+
+ public static Version getTargetLuceneVersion() {
+ return SearchTestCase.getTargetLuceneVersion();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
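The base class above only declares the hooks (getSFNbrs, getMappings, configure) that concrete tests fill in. A minimal sketch, not part of the commit, of how a subclass would wire two session factories; the class name and index paths are illustrative, SnowStorm is the entity added below, and org.hibernate.cfg.Configuration is assumed imported:

	// A minimal sketch of a MultipleSFTestCase subclass building two factories.
	public class TwoFactoriesExampleTest extends MultipleSFTestCase {

		protected int getSFNbrs() {
			return 2;                                   // number of session factories to build
		}

		protected Class<?>[] getMappings() {
			return new Class[] { SnowStorm.class };     // same annotated classes for every factory
		}

		protected void configure(Configuration[] cfg) {
			cfg[0].setProperty( "hibernate.search.default.indexBase", "/tmp/index-a" );  // per-factory override
			cfg[1].setProperty( "hibernate.search.default.indexBase", "/tmp/index-b" );
		}
	}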
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/SnowStorm.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/SnowStorm.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/SnowStorm.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,84 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.directoryProvider;
+
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Resolution;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Indexed
+@Entity
+public class SnowStorm {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.UN_TOKENIZED)
+ @DateBridge( resolution = Resolution.DAY )
+ @Column(name="xdate")
+ private Date date;
+
+ @Field(index = Index.TOKENIZED)
+ private String location;
+
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Date getDate() {
+ return date;
+ }
+
+ public void setDate(Date date) {
+ this.date = date;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public void setLocation(String location) {
+ this.location = location;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/directoryProvider/SnowStorm.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Address.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Address.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Address.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,111 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.annotations.Target;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+
+@Entity
+@Indexed
+public class Address {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String street;
+
+ @IndexedEmbedded(depth = 1, prefix = "ownedBy_", targetElement = Owner.class)
+ @Target(Owner.class)
+ private Person ownedBy;
+
+ @OneToMany(mappedBy = "address")
+ @ContainedIn
+ private Set<Tower> towers = new HashSet<Tower>();
+
+ @ManyToOne(cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ private Country country;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getStreet() {
+ return street;
+ }
+
+ public void setStreet(String street) {
+ this.street = street;
+ }
+
+ public Person getOwnedBy() {
+ return ownedBy;
+ }
+
+ public void setOwnedBy(Person ownedBy) {
+ this.ownedBy = ownedBy;
+ }
+
+
+ public Set<Tower> getTowers() {
+ return towers;
+ }
+
+ public void setTowers(Set<Tower> towers) {
+ this.towers = towers;
+ }
+
+ public Country getCountry() {
+ return country;
+ }
+
+ public void setCountry(Country country) {
+ this.country = country;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Address.java
___________________________________________________________________
Name: svn:keywords

+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Author.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Author.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Author.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class Author {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index= Index.TOKENIZED)
+ private String name;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Author.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Country.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Country.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Country.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,85 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.hibernate.annotations.IndexColumn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Country {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field
+ private String name;
+
+ @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
+ //FIXME with JPA 2, move to @OrderColumn
+ @IndexColumn(name = "list_position")
+ @IndexedEmbedded
+ private List<State> states = new ArrayList<State>();
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public List<State> getStates() {
+ return states;
+ }
+
+ public void setStates(List<State> states) {
+ this.states = states;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Country.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/EmbeddedTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/EmbeddedTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/EmbeddedTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,343 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class EmbeddedTest extends SearchTestCase {
+
+ public void testEmbeddedIndexing() throws Exception {
+ Tower tower = new Tower();
+ tower.setName( "JBoss tower" );
+ Address a = new Address();
+ a.setStreet( "Tower place" );
+ a.getTowers().add( tower );
+ tower.setAddress( a );
+ Person o = new Owner();
+ o.setName( "Atlanta Renting corp" );
+ a.setOwnedBy( o );
+ o.setAddress( a );
+ Country c = new Country();
+ c.setName( "France" );
+ a.setCountry( c );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( tower );
+ tx.commit();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+ query = parser.parse( "address.street:place" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "unable to find property in embedded", 1, result.size() );
+
+ query = parser.parse( "address.ownedBy_name:renting" );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "unable to find property in embedded", 1, result.size() );
+
+ query = parser.parse( "address.id:" + a.getId().toString() );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "unable to find property by id of embedded", 1, result.size() );
+
+ query = parser.parse( "address.country.name:" + a.getCountry().getName() );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "unable to find property with 2 levels of embedded", 1, result.size() );
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ Address address = (Address) s.get( Address.class, a.getId() );
+ address.getOwnedBy().setName( "Buckhead community" );
+ tx.commit();
+
+ s.clear();
+
+ session = Search.getFullTextSession( s );
+
+ query = parser.parse( "address.ownedBy_name:buckhead" );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 1, result.size() );
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ s.delete( s.get( Tower.class, tower.getId() ) );
+ tx.commit();
+
+ s.close();
+
+ }
+
+ public void testEmbeddedIndexingOneToMany() throws Exception {
+ Country country = new Country();
+ country.setName( "Germany" );
+ List<State> states = new ArrayList<State>();
+ State bayern = new State();
+ bayern.setName( "Bayern" );
+ State hessen = new State();
+ hessen.setName( "Hessen" );
+ State sachsen = new State();
+ sachsen.setName( "Sachsen" );
+ states.add( bayern );
+ states.add( hessen );
+ states.add( sachsen );
+ country.setStates( states );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( country );
+ tx.commit();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+ query = parser.parse( "states.name:Hessen" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "unable to find property in embedded", 1, result.size() );
+ s.close();
+ }
+
+ public void testContainedIn() throws Exception {
+ Tower tower = new Tower();
+ tower.setName( "JBoss tower" );
+ Address a = new Address();
+ a.setStreet( "Tower place" );
+ a.getTowers().add( tower );
+ tower.setAddress( a );
+ Person o = new Owner();
+ o.setName( "Atlanta Renting corp" );
+ a.setOwnedBy( o );
+ o.setAddress( a );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( tower );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ Address address = (Address) s.get( Address.class, a.getId() );
+ address.setStreet( "Peachtree Road NE" );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+ query = parser.parse( "address.street:peachtree" );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 1, result.size() );
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ address = (Address) s.get( Address.class, a.getId() );
+ Tower tower1 = address.getTowers().iterator().next();
+ tower1.setAddress( null );
+ address.getTowers().remove( tower1 );
+ tx.commit();
+
+ s.clear();
+
+ session = Search.getFullTextSession( s );
+
+ query = parser.parse( "address.street:peachtree" );
+ result = session.createFullTextQuery( query, Tower.class ).list();
+ assertEquals( "breaking link fails", 0, result.size() );
+
+ tx = s.beginTransaction();
+ s.delete( s.get( Tower.class, tower.getId() ) );
+ tx.commit();
+
+ s.close();
+
+ }
+
+ public void testIndexedEmbeddedAndCollections() throws Exception {
+ Author a = new Author();
+ a.setName( "Voltaire" );
+ Author a2 = new Author();
+ a2.setName( "Victor Hugo" );
+ Author a3 = new Author();
+ a3.setName( "Moliere" );
+ Author a4 = new Author();
+ a4.setName( "Proust" );
+
+ Order o = new Order();
+ o.setOrderNumber( "ACVBNM" );
+
+ Order o2 = new Order();
+ o2.setOrderNumber( "ZERTYD" );
+
+ Product p1 = new Product();
+ p1.setName( "Candide" );
+ p1.getAuthors().add( a );
+ p1.getAuthors().add( a2 ); // be creative
+
+ Product p2 = new Product();
+ p2.setName( "Le malade imaginaire" );
+ p2.getAuthors().add( a3 );
+ p2.getOrders().put( "Emmanuel", o );
+ p2.getOrders().put( "Gavin", o2 );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( a );
+ s.persist( a2 );
+ s.persist( a3 );
+ s.persist( a4 );
+ s.persist( o );
+ s.persist( o2 );
+ s.persist( p1 );
+ s.persist( p2 );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ tx = session.beginTransaction();
+
+ QueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(), new String[] { "name", "authors.name" }, SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+ query = parser.parse( "Hugo" );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ assertEquals( "collection of embedded ignored", 1, result.size() );
+
+ // update the collection
+ Product p = (Product) result.get( 0 );
+ p.getAuthors().add( a4 );
+
+ // TermQuery on the untokenized orderNumber field
+ query = new TermQuery( new Term( "orders.orderNumber", "ZERTYD" ) );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ assertEquals( "collection of untokenized ignored", 1, result.size() );
+ query = new TermQuery( new Term( "orders.orderNumber", "ACVBNM" ) );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ assertEquals( "collection of untokenized ignored", 1, result.size() );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ session = Search.getFullTextSession( s );
+ query = parser.parse( "Proust" );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ // HSEARCH-56
+ assertEquals( "update of collection of embedded ignored", 1, result.size() );
+
+ s.delete( s.get( Product.class, p1.getId() ) );
+ s.delete( s.get( Product.class, p2.getId() ) );
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * Tests that updating an indexed embedded object updates the Lucene index as well.
+ *
+ * @throws Exception in case the test fails
+ */
+ public void testEmbeddedObjectUpdate() throws Exception {
+
+ State state = new State();
+ state.setName( "Bavaria" );
+ StateCandidate candidate = new StateCandidate();
+ candidate.setName( "Mueller" );
+ candidate.setState( state );
+ state.setCandidate( candidate );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( candidate );
+ tx.commit();
+ s.clear();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ tx = session.beginTransaction();
+
+ QueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(), new String[] { "name", "state.name" }, SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+ query = parser.parse( "Bavaria" );
+ result = session.createFullTextQuery( query, StateCandidate.class ).list();
+ assertEquals( "IndexEmbedded ignored.", 1, result.size() );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ state.setName( "Hessen" );
+ state = (State) s.merge( state );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ session = Search.getFullTextSession( s );
+ query = parser.parse( "Hessen" );
+ result = session.createFullTextQuery( query, StateCandidate.class ).list();
+ assertEquals( "IndexEmbedded ignored.", 1, result.size() );
+ tx.commit();
+ s.clear();
+ s.close();
+ }
+
+ protected void configure( org.hibernate.cfg.Configuration cfg ) {
+ super.configure( cfg );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { Tower.class, Address.class, Product.class, Order.class, Author.class, Country.class,
+ State.class, StateCandidate.class, NonIndexedEntity.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/EmbeddedTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
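The queries in EmbeddedTest rely on the Lucene field names derived from the @IndexedEmbedded mappings added above. A short sketch, not part of the commit, of that correspondence with an illustrative query; Query, TermQuery and Term are the Lucene classes already imported by the test, and the lower-cased term follows the test's use of the standard analyzer:

	// Field names produced by the mappings above and used in EmbeddedTest:
	//   Tower.address  (@IndexedEmbedded)                     -> "address.street", "address.id"
	//   Address.ownedBy (@IndexedEmbedded, prefix "ownedBy_") -> "address.ownedBy_name"
	//   Address.country (@IndexedEmbedded, two levels deep)   -> "address.country.name"
	Query q = new TermQuery( new Term( "address.ownedBy_name", "renting" ) );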
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/NonIndexedEntity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/NonIndexedEntity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/NonIndexedEntity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class NonIndexedEntity {
+ @Id
+ @GeneratedValue
+ private int id;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/NonIndexedEntity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Order.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Order.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Order.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+import javax.persistence.Table;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Table(name = "`Order`")
+public class Order {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index= Index.UN_TOKENIZED)
+ private String orderNumber;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getOrderNumber() {
+ return orderNumber;
+ }
+
+ public void setOrderNumber(String orderNumber) {
+ this.orderNumber = orderNumber;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Order.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Owner.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Owner.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Owner.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,62 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.Embeddable;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.annotations.Parent;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Embeddable
+public class Owner implements Person {
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+ @Parent
+ @IndexedEmbedded //play the lunatic user
+ private Address address;
+
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public Address getAddress() {
+ return address;
+ }
+
+ public void setAddress(Address address) {
+ this.address = address;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Owner.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Person.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Person.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Person.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,38 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public interface Person {
+ public String getName();
+
+ public void setName(String name);
+
+ public Address getAddress();
+
+ public void setAddress(Address address);
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Person.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Product.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Product.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Product.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,99 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import java.util.Set;
+import java.util.Map;
+import java.util.HashSet;
+import java.util.HashMap;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+import javax.persistence.ManyToMany;
+import javax.persistence.Column;
+import javax.persistence.CascadeType;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.annotations.MapKey;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Product {
+
+ @Id @GeneratedValue @DocumentId
+ private Integer id;
+
+ @Field(index= Index.TOKENIZED)
+ private String name;
+
+ @ManyToMany(cascade = CascadeType.REMOVE) //just to make the test easier, cascade doesn't really make any business sense
+ @IndexedEmbedded
+ private Set<Author> authors = new HashSet<Author>();
+
+ @ManyToMany(cascade = CascadeType.REMOVE) //just to make the test easier, cascade doesn't really make any business sense
+ @MapKey(columns = @Column(name="CUST_NAME",nullable=false) )
+ @IndexedEmbedded
+ private Map<String, Order> orders = new HashMap<String, Order>();
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public Set<Author> getAuthors() {
+ return authors;
+ }
+
+ public void setAuthors(Set<Author> authors) {
+ this.authors = authors;
+ }
+
+ public Map<String, Order> getOrders() {
+ return orders;
+ }
+
+ public void setOrders(Map<String, Order> orders) {
+ this.orders = orders;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Product.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/State.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/State.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/State.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,77 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToOne;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class State {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String name;
+
+ @ContainedIn
+ @OneToOne(mappedBy = "state", cascade = CascadeType.ALL)
+ private StateCandidate candidate;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public StateCandidate getCandidate() {
+ return candidate;
+ }
+
+ public void setCandidate( StateCandidate candidate ) {
+ this.candidate = candidate;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/State.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/StateCandidate.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/StateCandidate.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/StateCandidate.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,92 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToOne;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ *
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class StateCandidate implements Person {
+
+ @Id @GeneratedValue
+ @DocumentId
+ private int id;
+
+ @Field
+ private String name;
+
+ @OneToOne(cascade = CascadeType.ALL)
+ private Address address;
+
+ @IndexedEmbedded
+ @OneToOne(cascade = CascadeType.ALL)
+ private State state;
+
+ public State getState() {
+ return state;
+ }
+
+ public void setState( State state ) {
+ this.state = state;
+ }
+
+ public Address getAddress() {
+ return address;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setAddress( Address address ) {
+ this.address = address;
+
+ }
+
+ public void setName( String name ) {
+ this.name = name;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId( int id ) {
+ this.id = id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/StateCandidate.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Tower.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Tower.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Tower.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,81 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Tower {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+ @ManyToOne(cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ private Address address;
+
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public Address getAddress() {
+ return address;
+ }
+
+ public void setAddress(Address address) {
+ this.address = address;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/Tower.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Address.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Address.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Address.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,273 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+import org.hibernate.annotations.Type;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Indexed
+@Table(name="T_ADDRESS")
+public class Address implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+
+ @Id @GeneratedValue(strategy=GenerationType.AUTO)
+ @Column(name="A_ADDRESS_ID")
+ @DocumentId
+ private long id;
+
+ @Column(name="A_ADDRESS1")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String address1;
+
+ @Column(name="A_ADDRESS2")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String address2;
+
+ @Column(name="A_TOWN")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String town;
+
+ @Column(name="A_COUNTY")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String county;
+
+ @Column(name="A_COUNTRY")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String country;
+
+ @Column(name="A_POSTCODE")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String postcode;
+
+ @Column(name="A_ACTIVE")
+ @Type(type="boolean")
+ private boolean active;
+
+ @Column(name="A_CREATEDON")
+ @Type(type="java.util.Date")
+ private Date createdOn;
+
+ @Column(name="A_LASTUPDATEDON")
+ @Type(type="java.util.Date")
+ private Date lastUpdatedOn;
+
+ @ManyToOne
+ @JoinColumn(name="C_CONTACT_ID")
+ @IndexedEmbedded
+ private Contact contact;
+
+ public Address(String address1, String address2, String town,
+ String county, String country, String postcode, boolean active, Contact contact) {
+ super();
+ this.address1 = address1;
+ this.address2 = address2;
+ this.town = town;
+ this.county = county;
+ this.country = country;
+ this.postcode = postcode;
+ this.active = active;
+ this.contact = contact;
+ }
+
+ public Address() {
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public void setId(long id) {
+ this.id = id;
+ }
+
+ public String getAddress1() {
+ return address1;
+ }
+
+ public void setAddress1(String address1) {
+ this.address1 = address1;
+ }
+
+ public String getAddress2() {
+ if (null == this.address2 || "".equals(this.address2)) {
+ return "N/A";
+ }
+ return address2;
+ }
+
+ public void setAddress2(String address2) {
+ this.address2 = address2;
+ }
+
+ public String getTown() {
+ return town;
+ }
+
+ public void setTown(String town) {
+ this.town = town;
+ }
+
+ public String getCounty() {
+ if (null == this.county || "".equals(this.county)) {
+ return "N/A";
+ }
+ return county;
+ }
+
+ public void setCounty(String county) {
+ this.county = county;
+ }
+
+ public String getCountry() {
+ return country;
+ }
+
+ public void setCountry(String country) {
+ this.country = country;
+ }
+
+ public String getPostcode() {
+ return postcode;
+ }
+
+ public void setPostcode(String postcode) {
+ this.postcode = postcode;
+ }
+
+ public boolean isActive() {
+ return active;
+ }
+
+ public void setActive(boolean active) {
+ this.active = active;
+ }
+
+
+ public Date getCreatedOn() {
+ return createdOn;
+ }
+
+ public void setCreatedOn(Date createdOn) {
+ this.createdOn = createdOn;
+ }
+
+ public Date getLastUpdatedOn() {
+ return lastUpdatedOn;
+ }
+
+ public void setLastUpdatedOn(Date lastUpdatedOn) {
+ this.lastUpdatedOn = lastUpdatedOn;
+ }
+
+
+
+ public Contact getContact() {
+ return contact;
+ }
+
+ public void setContact(Contact contact) {
+ this.contact = contact;
+ }
+
+ public boolean equals(Object object) {
+ if (!(object instanceof Address)) {
+ return false;
+ }
+ Address that = (Address)object;
+ if ( ! equals(this.getAddress1(), that.getAddress1() ) ) return false;
+ if ( ! equals(this.getAddress2(), that.getAddress2() ) ) return false;
+ if ( ! equals(this.getCounty(), that.getCounty() ) ) return false;
+ if ( ! equals(this.getTown(), that.getTown() ) ) return false;
+ if ( ! equals(this.getPostcode(), that.getPostcode() ) ) return false;
+ return equals( this.getContact(), that.getContact() );
+ // EqualsBuilder equalsBuilder = new EqualsBuilder();
+// return equalsBuilder.append(new Object[]{this.getAddress1(), this.getAddress2(), this.getCounty(), this.getTown(), this.getPostcode(), this.contact}, new Object[]{address.getAddress1(), address.getAddress2(), address.getCounty(), address.getTown(), address.getPostcode(), address.getContact()}).isEquals();
+ }
+
+ private boolean equals(Object o1, Object o2) {
+ if ( o1 == o2 ) return true;
+ if ( o1 == null || o2 == null ) return false;
+ return o1.equals( o2 );
+ }
+
+ private int hashCode(Object o) {
+ return o == null ? 0 : o.hashCode();
+ }
+
+
+ public int hashCode() {
+ int a = 13;
+ a = a*23 + hashCode( this.getAddress1());
+ a = a*23 + hashCode( this.getAddress2());
+ a = a*23 + hashCode( this.getCounty());
+ a = a*23 + hashCode( this.getTown());
+ a = a*23 + hashCode( this.getPostcode());
+ a = a*23 + hashCode( this.getContact());
+ return a;
+// return new HashCodeBuilder().append(new Object[]{this.getAddress1(), this.getAddress2(), this.getCounty(), this.getTown(), this.getPostcode(), this.getContact()}).hashCode();
+ }
+
+
+ public String toString() {
+ StringBuffer buf = new StringBuffer();
+ displayAddress(buf, this);
+ return buf.toString();
+ }
+ private void displayAddress(StringBuffer buf, Address address) {
+// buf.append(Constants.TAB + Constants.TAB + "Address 1: " + address.getAddress1() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"Address 2: " + address.getAddress2() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"Town: " + address.getTown() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"County: " + address.getCounty() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"Postcode: " + address.getPostcode() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"Country: " + address.getCountry() + Constants.NEW_LINE);
+// buf.append(Constants.TAB + Constants.TAB +"Is current: " + (address.isActive()? "Yes" : "No") + Constants.NEW_LINE);
+// buf.append(Constants.NEW_LINE);
+ }
+
+ public boolean isValidPostcode() {
+
+ return false;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Address.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/BusinessContact.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/BusinessContact.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/BusinessContact.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,94 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import javax.persistence.Column;
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@DiscriminatorValue("BusinessContact")
+@Indexed
+public class BusinessContact extends Contact {
+
+ @Column(name="P_BUSINESSNAME")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String businessName;
+
+ @Column(name="P_BUSINESSURL")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String url;
+
+ public BusinessContact() {
+ }
+
+ public String getBusinessName() {
+ return businessName;
+ }
+
+ public void setBusinessName(String businessName) {
+ this.businessName = businessName;
+ }
+
+ public String getUrl() {
+ if (null == this.url || "".equals(this.url)) {
+ return "Not provided";
+ }
+ return url;
+ }
+
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+// public boolean equals(Object object) {
+// if (!(object instanceof BusinessContact)) {
+// return false;
+// }
+// BusinessContact businessContact = (BusinessContact)object;
+// return new EqualsBuilder().append(new Object[]{this.getId(), this.getBusinessName(), this.getUrl()}, new Object[]{businessContact.getId(), businessContact.getBusinessName(), businessContact.getUrl()}).isEquals();
+// }
+//
+// public int hashCode() {
+// return new HashCodeBuilder().append(new Object[]{new Long(this.getId()), this.getBusinessName(), this.getUrl()}).toHashCode();
+// }
+// public String toString() {
+// StringBuffer buf = new StringBuffer();
+// buf.append("Business Name: " + this.getBusinessName() + Constants.NEW_LINE);
+// buf.append("Business Url: " + this.getUrl() + Constants.NEW_LINE);
+// buf.append("Email: " + this.getEmail() + Constants.NEW_LINE);
+// super.displayPhonesAndAddresses(buf);
+// return buf.toString();
+// }
+
+
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/BusinessContact.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Contact.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Contact.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Contact.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,241 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import java.io.Serializable;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Column;
+import javax.persistence.DiscriminatorColumn;
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.Inheritance;
+import javax.persistence.InheritanceType;
+import javax.persistence.OneToMany;
+import javax.persistence.Table;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.Predicate;
+import org.hibernate.annotations.Type;
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Table(name="T_CONTACT")
+@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
+@DiscriminatorValue("Contact")
+@DiscriminatorColumn(name="contactType",discriminatorType=javax.persistence.DiscriminatorType.STRING)
+@Indexed
+public class Contact implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ @Id @GeneratedValue(strategy=GenerationType.AUTO)
+ @Column(name="C_CONTACT_ID")
+ @DocumentId
+ private long id;
+
+ @Column(name="C_EMAIL")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String email;
+
+ @Column(name="C_CREATEDON")
+ @Type(type="java.util.Date")
+ private Date createdOn;
+
+ @Column(name="C_LASTUPDATEDON")
+ @Type(type="java.util.Date")
+ private Date lastUpdatedOn;
+
+ @ContainedIn
+ @OneToMany( cascade = { CascadeType.ALL}, fetch=FetchType.EAGER)
+ @Type(type="java.util.Set")
+ private Set<Address> addresses;
+
+ @ContainedIn
+ @OneToMany(cascade = { CascadeType.ALL}, fetch=FetchType.EAGER)
+ @Type(type="java.util.Set")
+ private Set<Phone> phoneNumbers;
+
+ @Column(name="C_NOTES")
+ private String notes;
+
+ public Contact() {
+ }
+
+ public long getId() {
+ return id;
+ }
+ public void setId(long id) {
+ this.id = id;
+ }
+ public String getEmail() {
+ if (null == this.email || "".equals(this.email)) {
+ return "N/A";
+ }
+ return email;
+ }
+ public void setEmail(String email) {
+ this.email = email;
+ }
+ public Date getCreatedOn() {
+ return createdOn;
+ }
+ public void setCreatedOn(Date createdOn) {
+ this.createdOn = createdOn;
+ }
+ public Date getLastUpdatedOn() {
+ return lastUpdatedOn;
+ }
+ public void setLastUpdatedOn(Date lastUpdatedOn) {
+ this.lastUpdatedOn = lastUpdatedOn;
+ }
+ public Set<Address> getAddresses() {
+ return addresses;
+ }
+ public void setAddresses(Set<Address> addresses) {
+ this.addresses = addresses;
+ }
+ public Set<Phone> getPhoneNumbers() {
+ return phoneNumbers;
+ }
+ public void setPhoneNumbers(Set<Phone> phoneNumbers) {
+ this.phoneNumbers = phoneNumbers;
+ }
+
+
+ public String getNotes() {
+ return notes;
+ }
+
+ public void setNotes(String notes) {
+ this.notes = notes;
+ }
+
+ public void addAddressToContact(Address address) {
+ if (address == null) {
+ throw new IllegalArgumentException("Address cannot be null");
+ }
+ if (addresses == null) {
+ addresses = new HashSet<Address>();
+ }
+ address.setContact(this);
+ addresses.add(address);
+ }
+
+
+ public void addPhoneToContact(Phone phone) {
+ if (phone == null) {
+ throw new IllegalArgumentException("Phone cannot be null");
+ }
+ if (phoneNumbers == null) {
+ phoneNumbers = new HashSet<Phone>();
+ }
+ phone.setContact(this);
+ phoneNumbers.add(phone);
+ }
+
+
+ public void removePhoneFromContact(Phone phone) {
+ if (phone == null) {
+ throw new IllegalArgumentException("Phone cannot be null");
+ }
+ if (this.phoneNumbers.contains(phone)) {
+ this.phoneNumbers.remove(phone);
+ }
+ }
+
+ public void removeAddressFromContact(Address address) {
+ if (address == null) {
+ throw new IllegalArgumentException("Address cannot be null");
+ }
+ if (this.addresses.contains(address)) {
+ this.addresses.remove(address);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ protected List<Phone> filterPhoneNumbersByType(final String phoneType) {
+// Assert.notNull(phoneType, "Phone type cannot be null");
+// Assert.hasText(phoneType, "Phone type cannot be empty");
+ return (List<Phone>)CollectionUtils.select(this.phoneNumbers, new Predicate() {
+ public boolean evaluate(Object object) {
+ Phone phone = (Phone)object;
+ return phoneType.equals(phone.getType());
+ }
+ });
+ }
+
+
+ @SuppressWarnings("unchecked")
+ protected List<Address> showActiveAddresses() {
+ return (List<Address>) CollectionUtils.select(this.addresses,new Predicate() {
+ public boolean evaluate(Object object) {
+ Address address = (Address)object;
+ return address.isActive();
+ }
+ });
+ }
+
+ @SuppressWarnings("unchecked")
+ protected List<Address> showInactiveAddresses() {
+ return (List<Address>) CollectionUtils.select(this.addresses, new Predicate() {
+ public boolean evaluate(Object object) {
+ Address address = (Address)object;
+ return !address.isActive();
+ }
+ });
+ }
+
+ protected void displayPhonesAndAddresses(StringBuffer buf) {
+// buf.append(Constants.NEW_LINE);
+// buf.append("Phone Detail(s):" + Constants.NEW_LINE);
+// if (null != this.getPhoneNumbers() && 0 != this.getPhoneNumbers().size()) {
+// for (Phone phone: this.getPhoneNumbers()) {
+// buf.append(phone);
+// }
+// }
+// buf.append(Constants.NEW_LINE);
+// buf.append("Address Details:" + Constants.NEW_LINE );
+// if (null != this.getAddresses() && 0 != this.getAddresses().size()) {
+// for (Address address: this.getAddresses()) {
+// buf.append(address);
+// }
+// }
+ }
+
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Contact.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/DoubleInsertEmbeddedTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/DoubleInsertEmbeddedTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/DoubleInsertEmbeddedTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,105 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import java.util.Date;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Query;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DoubleInsertEmbeddedTest extends SearchTestCase {
+ public void testDoubleInsert() throws Exception {
+ Address address = new Address();
+ address.setAddress1( "TEST1" );
+ address.setAddress2( "N/A" );
+ address.setTown( "TEST TOWN" );
+ address.setCounty( "TEST COUNTY" );
+ address.setCountry( "UK" );
+ address.setPostcode( "XXXXXXX" );
+ address.setActive( true );
+ address.setCreatedOn( new Date() );
+ address.setLastUpdatedOn( new Date() );
+
+ Phone phone = new Phone();
+ phone.setNumber( "01273234122" );
+ phone.setType( "HOME" );
+ phone.setCreatedOn( new Date() );
+ phone.setLastUpdatedOn( new Date() );
+
+ PersonalContact contact = new PersonalContact();
+ contact.setFirstname( "Amin" );
+ contact.setSurname( "Mohammed-Coleman" );
+ contact.setEmail( "address(a)hotmail.com" );
+ contact.setDateOfBirth( new Date() );
+ contact.setNotifyBirthDay( false );
+ contact.setCreatedOn( new Date() );
+ contact.setLastUpdatedOn( new Date() );
+ contact.setNotes( "TEST" );
+ contact.addAddressToContact( address );
+ contact.addPhoneToContact( phone );
+
+ FullTextSession s = Search.getFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ s.save( contact);
+ s.getTransaction().commit();
+
+ s.close();
+
+ s = Search.getFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ Term term = new Term("county", "county");
+ TermQuery termQuery = new TermQuery( term );
+ Query query = s.createFullTextQuery( termQuery );
+ assertEquals( 1, query.list().size() );
+ contact = (PersonalContact) s.get( PersonalContact.class, contact.getId() );
+ contact.getPhoneNumbers().clear();
+ contact.getAddresses().clear();
+ s.flush();
+ s.clear();
+ s.createQuery( "delete " + Address.class.getName() ).executeUpdate();
+ s.createQuery( "delete " + Phone.class.getName() ).executeUpdate();
+ s.createQuery( "delete " + Contact.class.getName() ).executeUpdate();
+ s.getTransaction().commit();
+
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Address.class,
+ Contact.class,
+ PersonalContact.class,
+ BusinessContact.class,
+ Phone.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/DoubleInsertEmbeddedTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/PersonalContact.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/PersonalContact.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/PersonalContact.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,183 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+
+import org.hibernate.annotations.Type;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@DiscriminatorValue("PersonalContact")
+@Indexed
+public class PersonalContact extends Contact {
+ private static final long serialVersionUID = 1L;
+
+ @Column(name="P_FIRSTNAME")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String firstname;
+
+ @Column(name="P_SURNAME")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String surname;
+
+ @Column(name="P_DATEOFBIRTH")
+ @Type(type="java.util.Date")
+ private Date dateOfBirth;
+
+ @Column(name="P_NOTIFYBIRTHDAY")
+ @Type(type="boolean")
+ private boolean notifyBirthDay;
+
+ @Column(name="P_MYFACESURL")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String myFacesUrl;
+
+ @Column(name="P_REMINDERCOUNT")
+ private int reminderCount;
+
+ @Column(name="P_REMINDERRESET")
+ @Type(type="boolean")
+ private boolean reset;
+
+
+ public PersonalContact() {
+ }
+
+ public String getFirstname() {
+ return firstname;
+ }
+ public void setFirstname(String firstname) {
+ this.firstname = firstname;
+ }
+ public String getSurname() {
+ return surname;
+ }
+ public void setSurname(String surname) {
+ this.surname = surname;
+ }
+ public Date getDateOfBirth() {
+ return dateOfBirth;
+ }
+ public void setDateOfBirth(Date dateOfBirth) {
+ this.dateOfBirth = dateOfBirth;
+ }
+
+ public boolean isNotifyBirthDay() {
+ return notifyBirthDay;
+ }
+
+ public void setNotifyBirthDay(boolean notifyBirthDay) {
+ this.notifyBirthDay = notifyBirthDay;
+ }
+
+
+ public String getMyFacesUrl() {
+ return myFacesUrl;
+ }
+
+ public void setMyFacesUrl(String myFacesUrl) {
+ this.myFacesUrl = myFacesUrl;
+ }
+
+
+
+ public int getReminderCount() {
+ return reminderCount;
+ }
+
+ public void setReminderCount(int reminderCount) {
+ this.reminderCount = reminderCount;
+ }
+
+
+ public boolean isReset() {
+ return reset;
+ }
+
+ public void setReset(boolean reset) {
+ this.reset = reset;
+ }
+
+ private boolean equals(Object o1, Object o2) {
+ if ( o1 == o2 ) return true;
+ if ( o1 == null || o2 == null ) return false;
+ return o1.equals( o2 );
+ }
+
+ private int hashCode(Object o) {
+ return o == null ? 0 : o.hashCode();
+ }
+
+ public boolean equals(Object object) {
+ if (!(object instanceof PersonalContact)) {
+ return false;
+ }
+ PersonalContact that = (PersonalContact)object;
+ if ( ! equals(this.getId(), that.getId() ) ) return false;
+ if ( ! equals(this.getFirstname(), that.getFirstname() ) ) return false;
+ if ( ! equals(this.getSurname(), that.getSurname() ) ) return false;
+ return true;
+ }
+
+ public int hashCode() {
+ int a = 13;
+ a = a*23 + hashCode( this.getId());
+ a = a*23 + hashCode( this.getFirstname());
+ a = a*23 + hashCode( this.getSurname());
+ return a;
+ }
+
+// public boolean equals(Object object) {
+// if (!(object instanceof PersonalContact)) {
+// return false;
+// }
+// PersonalContact personalContact = (PersonalContact)object;
+// return new EqualsBuilder().append(new Object[]{this.getId(), this.getFirstname(), this.getSurname()}, new Object[]{personalContact.getId(), personalContact.getFirstname(), personalContact.getSurname()}).isEquals();
+// }
+//
+// public int hashCode() {
+// return new HashCodeBuilder().append(new Object[]{new Long(this.getId()), this.getFirstname(), this.getSurname()}).toHashCode();
+// }
+//
+// public String toString() {
+// StringBuffer buf = new StringBuffer();
+// buf.append("First Name: " + this.getFirstname()+ Constants.NEW_LINE);
+// buf.append("Surname: " + this.getSurname() + Constants.NEW_LINE);
+// buf.append("Email: " + this.getEmail() + Constants.NEW_LINE);
+// buf.append("Date of Birth: " + (null == this.getDateOfBirth() ? "Not Provided" : this.getDateOfBirth()) + Constants.NEW_LINE);
+// displayPhonesAndAddresses(buf);
+// return buf.toString();
+// }
+
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/PersonalContact.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Phone.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Phone.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Phone.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,153 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.doubleinsert;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+import org.hibernate.annotations.Type;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Table(name="T_PHONE")
+@Indexed
+public class Phone implements Serializable{
+
+ private static final long serialVersionUID = 1L;
+
+ @Id @GeneratedValue(strategy=GenerationType.AUTO)
+ @Column(name="P_PHONE_ID")
+ @DocumentId
+ private long id;
+
+ @Column(name="P_NUMBER")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String number;
+
+ @Column(name="P_TYPE")
+ @Field(index=Index.TOKENIZED, store=Store.YES)
+ private String type;
+
+ @Column(name="P_CREATEDON")
+ @Type(type="java.util.Date")
+ private Date createdOn;
+
+ @Column(name="P_LASTUPDATEDON")
+ @Type(type="java.util.Date")
+ private Date lastUpdatedOn;
+
+ @ManyToOne
+ @JoinColumn(name="C_CONTACT_ID")
+ @IndexedEmbedded
+ private Contact contact;
+
+
+ public Phone() {
+ }
+
+ public long getId() {
+ return id;
+ }
+ public void setId(long id) {
+ this.id = id;
+ }
+ public String getNumber() {
+ return number;
+ }
+
+ public void setNumber(String number) {
+ this.number = number;
+ }
+
+ public String getType() {
+ if (null == this.type || "".equals(this.type)) {
+ return "N/A";
+ }
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+ public Date getCreatedOn() {
+ return createdOn;
+ }
+ public void setCreatedOn(Date createdOn) {
+ this.createdOn = createdOn;
+ }
+ public Date getLastUpdatedOn() {
+ return lastUpdatedOn;
+ }
+ public void setLastUpdatedOn(Date lastUpdatedOn) {
+ this.lastUpdatedOn = lastUpdatedOn;
+ }
+
+
+ public Contact getContact() {
+ return contact;
+ }
+
+ public void setContact(Contact contact) {
+ this.contact = contact;
+ }
+
+
+// public int hashCode() {
+// return new HashCodeBuilder().append(new Object[]{this.number, this.type}).hashCode();
+// }
+//
+// public boolean equals(Object object) {
+// if (!(object instanceof Phone)) {
+// return false;
+// }
+//
+// return new EqualsBuilder().append(new Object[]{}, new Object[]{}).isEquals();
+// }
+//
+// public String toString() {
+// StringBuffer buf = new StringBuffer();
+// displayPhoneDetails(buf, this);
+// return buf.toString();
+// }
+
+ private void displayPhoneDetails(StringBuffer buf, Phone phone) {
+// buf.append(Constants.TAB + Constants.TAB + "Type: " + phone.getType() );
+// buf.append(Constants.SPACE + "Number: " + phone.getNumber() + Constants.NEW_LINE);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/doubleinsert/Phone.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Address.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Address.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Address.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,94 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class Address {
+ @Id
+ @GeneratedValue
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String street;
+
+ @Field(index = Index.TOKENIZED)
+ private String city;
+
+ @ContainedIn
+ @OneToMany(mappedBy = "address")
+ private Set<Place> places;
+
+ public Address(String street, String city) {
+ this();
+ this.street = street;
+ this.city = city;
+ }
+
+ private Address() {
+ places = new HashSet<Place>();
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public String getStreet() {
+ return street;
+ }
+
+ public String getCity() {
+ return city;
+ }
+
+ public Set<Place> getPlaces() {
+ return places;
+ }
+
+ public void addPlace(Place place) {
+ places.add( place );
+ }
+
+ public void setStreet(String street) {
+ this.street = street;
+ }
+
+ public void setCity(String city) {
+ this.city = city;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Address.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Attribute.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Attribute.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Attribute.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,86 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToMany;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class Attribute {
+
+ @Id
+ @GeneratedValue
+ private long id;
+
+ @ManyToOne
+ @ContainedIn
+ private Product product;
+
+ @OneToMany(mappedBy = "attribute", fetch = FetchType.LAZY, cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ private List<AttributeValue> values;
+
+ private Attribute() {
+ values = new ArrayList<AttributeValue>();
+ }
+
+ public Attribute(Product product) {
+ this.product = product;
+ values = new ArrayList<AttributeValue>();
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public Product getProduct() {
+ return product;
+ }
+
+ public void setProduct(Product product) {
+ this.product = product;
+ }
+
+ public List<AttributeValue> getValues() {
+ return values;
+ }
+
+ public void setValue(AttributeValue value) {
+ values.add( value );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Attribute.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/AttributeValue.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/AttributeValue.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/AttributeValue.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,84 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class AttributeValue {
+
+ @Id
+ @GeneratedValue
+ private long id;
+
+ @ManyToOne(targetEntity = Attribute.class, fetch = FetchType.EAGER)
+ @ContainedIn
+ private Attribute attribute;
+
+ @Column(name = "_value")
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ private String value;
+
+ private AttributeValue() {
+ }
+
+ public AttributeValue(Attribute attribute, String value) {
+ this.attribute = attribute;
+ this.value = value;
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+ public Attribute getAttribute() {
+ return attribute;
+ }
+
+ public void setAttribute(Attribute attribute) {
+ this.attribute = attribute;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/AttributeValue.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/NestedEmbeddedTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/NestedEmbeddedTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/NestedEmbeddedTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,159 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class NestedEmbeddedTest extends SearchTestCase {
+
+ /**
+ * HSEARCH-391
+ *
+ * @throws Exception in case the test fails
+ */
+ public void testNestedEmbeddedIndexing() throws Exception {
+ Product product = new Product();
+ Attribute attribute = new Attribute( product );
+ product.setAttribute( attribute );
+ AttributeValue value = new AttributeValue( attribute, "foo" );
+ attribute.setValue( value );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( product );
+ tx.commit();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "attributes.values.value", SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+
+ query = parser.parse( "foo" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "unable to find property in attribute value", 1, result.size() );
+
+
+ s.clear();
+ tx = s.beginTransaction();
+
+ product = ( Product ) s.get( Product.class, product.getId() );
+ product.getAttributes().get( 0 ).getValues().get( 0 ).setValue( "bar" );
+ tx.commit();
+
+ s.clear();
+
+ session = Search.getFullTextSession( s );
+
+ query = parser.parse( "foo" );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 0, result.size() );
+
+ query = parser.parse( "bar" );
+ result = session.createFullTextQuery( query, Product.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 1, result.size() );
+
+ s.close();
+ }
+
+
+ /**
+ * HSEARCH-391
+ *
+ * @throws Exception in case the test fails
+ */
+ public void testNestedEmbeddedIndexingWithContainedInOnCollection() throws Exception {
+ Person john = new Person( "John Doe" );
+ Place eiffelTower = new Place( "Eiffel Tower" );
+ Address addressEiffel = new Address( "Avenue Gustave Eiffel", "London" );
+ addressEiffel.addPlace( eiffelTower );
+ eiffelTower.setAddress( addressEiffel );
+ john.addPlaceVisited( eiffelTower );
+ eiffelTower.visitedBy( john );
+
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( john );
+ tx.commit();
+
+ FullTextSession session = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "placesVisited.address.city", SearchTestCase.standardAnalyzer );
+ Query query;
+ List<?> result;
+
+
+ query = parser.parse( "London" );
+ result = session.createFullTextQuery( query ).list();
+ assertEquals( "unable to find nested indexed value", 1, result.size() );
+
+
+ s.clear();
+ tx = s.beginTransaction();
+
+ john = ( Person ) s.get( Person.class, john.getId() );
+ john.getPlacesVisited().get( 0 ).getAddress().setCity( "Paris" );
+ tx.commit();
+
+ s.clear();
+
+ john = ( Person ) s.get( Person.class, john.getId() );
+
+ session = Search.getFullTextSession( s );
+
+ query = parser.parse( "London" );
+ result = session.createFullTextQuery( query, Person.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 0, result.size() );
+
+ query = parser.parse( "Paris" );
+ result = session.createFullTextQuery( query, Person.class ).list();
+ assertEquals( "change in embedded not reflected in root index", 1, result.size() );
+
+ s.close();
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Product.class, Attribute.class, AttributeValue.class, Person.class, Place.class, Address.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/NestedEmbeddedTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Person.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Person.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Person.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class Person {
+ @Id
+ @GeneratedValue
+ private long id;
+
+ String name;
+
+ @IndexedEmbedded
+ @ManyToMany(cascade = { CascadeType.ALL })
+ private List<Place> placesVisited;
+
+ private Person() {
+ placesVisited = new ArrayList<Place>( 0 );
+ }
+
+ public Person(String name) {
+ this();
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public List<Place> getPlacesVisited() {
+ return placesVisited;
+ }
+
+ public void addPlaceVisited(Place place) {
+ placesVisited.add( place );
+ }
+
+ public long getId() {
+ return id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Person.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Place.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Place.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Place.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,94 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+import javax.persistence.OneToOne;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class Place {
+ @Id
+ @GeneratedValue
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+ @OneToOne(cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ private Address address;
+
+ @ContainedIn
+ @ManyToMany(cascade = { CascadeType.ALL }, mappedBy = "placesVisited")
+ private Set<Person> visitedBy;
+
+ private Place() {
+ this.visitedBy = new HashSet<Person>();
+ }
+
+ public Place(String name) {
+ this();
+ this.name = name;
+ }
+
+ public Address getAddress() {
+ return address;
+ }
+
+ public String getName() {
+
+ return name;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setAddress(Address address) {
+ this.address = address;
+ }
+
+ public void visitedBy(Person person) {
+ visitedBy.add( person );
+ }
+
+ public Set<Person> getVisitedBy() {
+ return visitedBy;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Place.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Product.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Product.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Product.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,68 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.embedded.nested;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.OneToMany;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class Product {
+ @Id
+ @GeneratedValue
+ private long id;
+
+ @OneToMany(mappedBy = "product", fetch = FetchType.EAGER, cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ private List<Attribute> attributes;
+
+ public Product() {
+ attributes = new ArrayList<Attribute>();
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public List<Attribute> getAttributes() {
+ return attributes;
+ }
+
+ public void setAttribute(Attribute attribute) {
+ attributes.add( attribute );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/embedded/nested/Product.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusLine.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusLine.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusLine.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,115 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.engine;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * Test entity: a BusLine has many BusStops; needed to verify
+ * indexing of a lazy-loaded collection in an out-of-transaction use case.
+ *
+ * @author Sanne Grinovero
+ */
+@Entity
+@Indexed
+public class BusLine {
+
+ private Long id;
+ private String busLineName;
+ private Set<BusStop> stops = new HashSet<BusStop>();
+
+ @Id
+ @GeneratedValue
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Field(index = Index.NO, store = Store.YES)
+ public String getBusLineName() {
+ return busLineName;
+ }
+
+ public void setBusLineName(String busLine) {
+ this.busLineName = busLine;
+ }
+
+ @ManyToMany(cascade = CascadeType.ALL)
+ @IndexedEmbedded
+ public Set<BusStop> getStops() {
+ return stops;
+ }
+
+ public void setStops(Set<BusStop> stops) {
+ this.stops = stops;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ( ( busLineName == null ) ? 0 : busLineName.hashCode() );
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if ( this == obj ) {
+ return true;
+ }
+ if ( obj == null ) {
+ return false;
+ }
+ if ( getClass() != obj.getClass() ) {
+ return false;
+ }
+ BusLine other = ( BusLine ) obj;
+ if ( busLineName == null ) {
+ if ( other.busLineName != null ) {
+ return false;
+ }
+ }
+ else if ( !busLineName.equals( other.busLineName ) ) {
+ return false;
+ }
+ return true;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusLine.java
___________________________________________________________________
Name: svn:keywords
+ Id
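The BusLine/BusStop pair above is exercised by LazyCollectionsUpdatingTest (added further below). As a purely illustrative sketch -- the values and session handling here are assumptions written for this note, not code from this revision -- the out-of-transaction scenario the javadoc refers to looks roughly like this inside a SearchTestCase test method:

    // Illustrative sketch only: persist a BusLine with its lazily-loaded stops collection,
    // clear the session, then touch the collection again with no transaction open.
    // @ContainedIn on BusStop.busses is what keeps the owning BusLine document indexed.
    FullTextSession s = Search.getFullTextSession( openSession() );
    Transaction tx = s.beginTransaction();
    BusLine line = new BusLine();
    line.setBusLineName( "Line 64" );
    BusStop stop = new BusStop();
    stop.setRoadName( "Main Road" );
    line.getStops().add( stop );
    stop.getBusses().add( line );
    s.save( line );
    tx.commit();
    s.clear();
    BusLine reloaded = ( BusLine ) s.get( BusLine.class, line.getId() ); // no transaction open here
    assertEquals( 1, reloaded.getStops().size() );                       // triggers lazy initialization
    s.close();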
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusStop.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusStop.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusStop.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,111 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.engine;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+
+import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * Test entity: BusStop is @ContainedIn BusLine
+ *
+ * @author Sanne Grinovero
+ */
+@Entity
+public class BusStop {
+
+ private Long id;
+ private String roadName;
+ private Set<BusLine> busses = new HashSet<BusLine>();
+
+ @Id
+ @GeneratedValue
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Field
+ public String getRoadName() {
+ return roadName;
+ }
+
+ public void setRoadName(String roadName) {
+ this.roadName = roadName;
+ }
+
+ @ManyToMany(mappedBy = "stops", cascade = CascadeType.ALL)
+ @ContainedIn
+ public Set<BusLine> getBusses() {
+ return busses;
+ }
+
+ public void setBusses(Set<BusLine> busses) {
+ this.busses = busses;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + ( ( roadName == null ) ? 0 : roadName.hashCode() );
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if ( this == obj ) {
+ return true;
+ }
+ if ( obj == null ) {
+ return false;
+ }
+ if ( getClass() != obj.getClass() ) {
+ return false;
+ }
+ BusStop other = ( BusStop ) obj;
+ if ( roadName == null ) {
+ if ( other.roadName != null ) {
+ return false;
+ }
+ }
+ else if ( !roadName.equals( other.roadName ) ) {
+ return false;
+ }
+ return true;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/BusStop.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/EventListenerSerializationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/EventListenerSerializationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/EventListenerSerializationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,50 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.engine;
+
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
+import org.hibernate.search.event.FullTextIndexEventListener;
+import org.hibernate.search.test.SerializationTestHelper;
+
+/**
+ * Tests that the FullTextIndexEventListener is Serializable
+ *
+ * @author Sanne Grinovero
+ */
+public class EventListenerSerializationTest extends TestCase {
+
+ public void testEventListenerSerializable() throws IOException, ClassNotFoundException {
+ FullTextIndexEventListener eventListener = new FullTextIndexEventListener();
+ eventListener.addSynchronization( null, null );
+ Object secondListener = SerializationTestHelper
+ .duplicateBySerialization(eventListener);
+ assertNotNull(secondListener);
+ assertFalse(secondListener == eventListener);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/EventListenerSerializationTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
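SerializationTestHelper.duplicateBySerialization is used above but is not part of this change-set; a minimal sketch of such a helper, assuming plain in-memory Java serialization and no custom stream handling, looks like this:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;

    public class SerializationTestHelper {

        // Writes the object to an in-memory buffer and reads it back,
        // returning a distinct but equivalent copy.
        public static Object duplicateBySerialization(Object original)
                throws IOException, ClassNotFoundException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream( buffer );
            out.writeObject( original );
            out.close();
            ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( buffer.toByteArray() ) );
            return in.readObject();
        }
    }

The test above only requires that the round-trip succeeds and yields a distinct instance.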
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/LazyCollectionsUpdatingTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/LazyCollectionsUpdatingTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/LazyCollectionsUpdatingTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,148 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.engine;
+
+import java.util.List;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * TestCase for HSEARCH-178 (Search hitting HHH-2763)
+ * Verifies that it's possible to index lazy loaded collections from
+ * indexed entities even when no transactions are used.
+ *
+ * @author Sanne Grinovero
+ */
+public class LazyCollectionsUpdatingTest extends SearchTestCase {
+
+ public void testUpdatingInTransaction() {
+ assertFindsByRoadName( "buonarroti" );
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ try {
+ Transaction tx = fullTextSession.beginTransaction();
+ List list = fullTextSession.createCriteria( BusStop.class ).list();
+ assertNotNull( list );
+ assertEquals( 4, list.size() );
+ BusStop busStop = (BusStop) list.get( 1 );
+ busStop.setRoadName( "new road" );
+ tx.commit();
+ }
+ catch (org.hibernate.AssertionFailure ass) {
+ fail( ass.getMessage() );
+ }
+ finally {
+ fullTextSession.close();
+ }
+ assertFindsByRoadName( "new" );
+ }
+
+ public void testUpdatingOutOfTransaction() {
+ assertFindsByRoadName( "buonarroti" );
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ try {
+ List list = fullTextSession.createCriteria( BusStop.class ).list();
+ assertNotNull( list );
+ assertEquals( 4, list.size() );
+ BusStop busStop = (BusStop) list.get( 1 );
+ busStop.setRoadName( "new road" );
+ fullTextSession.flush();
+ }
+ catch (org.hibernate.AssertionFailure ass) {
+ fail( ass.getMessage() );
+ }
+ finally {
+ fullTextSession.close();
+ }
+ assertFindsByRoadName( "new" );
+ }
+
+ public void assertFindsByRoadName(String analyzedRoadname) {
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ Transaction tx = fullTextSession.beginTransaction();
+ TermQuery ftQuery = new TermQuery( new Term( "stops.roadName", analyzedRoadname ) );
+ FullTextQuery query = fullTextSession.createFullTextQuery( ftQuery, BusLine.class );
+ query.setProjection( "busLineName" );
+ List results = query.list();
+ assertEquals( 1, results.size() );
+ String resultName = (String) ((Object[])results.get(0))[0];
+ assertEquals( "Linea 64", resultName );
+ tx.commit();
+ fullTextSession.close();
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ openSession();
+ Transaction tx = null;
+ try {
+ tx = session.beginTransaction();
+ BusLine bus = new BusLine();
+ bus.setBusLineName( "Linea 64" );
+ addBusStop( bus, "Stazione Termini" );
+ addBusStop( bus, "via Gregorio VII" );
+ addBusStop( bus, "via Alessandro III" );
+ addBusStop( bus, "via M.Buonarroti" );
+ session.persist( bus );
+ tx.commit();
+ }
+ catch (Throwable t) {
+ if ( tx != null ) {
+ tx.rollback();
+ }
+ }
+ finally {
+ session.close();
+ }
+ }
+
+ private void addBusStop(BusLine bus, String roadName) {
+ BusStop stop = new BusStop();
+ stop.setRoadName( roadName );
+ bus.getStops().add( stop );
+ stop.getBusses().add( bus );
+ }
+
+ // Test setup options - Entities
+ @Override
+ protected Class<?>[] getMappings() {
+ return new Class[] { BusLine.class, BusStop.class };
+ }
+
+ // Test setup options - SessionFactory Properties
+ @Override
+ protected void configure(org.hibernate.cfg.Configuration configuration) {
+ super.configure( configuration );
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, SimpleAnalyzer.class.getName() );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/LazyCollectionsUpdatingTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/RollbackTransactionTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/RollbackTransactionTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/RollbackTransactionTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,113 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.engine;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Verify index changes queued during a transaction are canceled
+ * when the transaction is rolled back.
+ *
+ * @author Sanne Grinovero
+ */
+public class RollbackTransactionTest extends SearchTestCase {
+
+ public void testTransactionBehaviour() {
+ assertEquals( 0, countBusLinesByFullText() );
+ assertEquals( 0, countBusLineByDatabaseCount() );
+ createBusLines( 5, true );
+ assertEquals( 0, countBusLinesByFullText() );
+ assertEquals( 0, countBusLineByDatabaseCount() );
+ createBusLines( 5, false );
+ assertEquals( 5, countBusLinesByFullText() );
+ assertEquals( 5, countBusLineByDatabaseCount() );
+ createBusLines( 7, true );
+ assertEquals( 5, countBusLinesByFullText() );
+ assertEquals( 5, countBusLineByDatabaseCount() );
+ createBusLines( 7, false );
+ assertEquals( 12, countBusLinesByFullText() );
+ assertEquals( 12, countBusLineByDatabaseCount() );
+ }
+
+ private void createBusLines(int number, boolean rollback) {
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ Transaction tx = fullTextSession.beginTransaction();
+ for (int i=0; i<number; i++ ) {
+ BusLine line = new BusLine();
+ line.setBusLineName( "line " + i );
+ fullTextSession.persist( line );
+ }
+ if ( rollback ) {
+ tx.rollback();
+ }
+ else {
+ tx.commit();
+ }
+ fullTextSession.close();
+ }
+
+ public int countBusLinesByFullText() {
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ Transaction tx = fullTextSession.beginTransaction();
+ org.apache.lucene.search.Query ftQuery = new MatchAllDocsQuery();
+ FullTextQuery query = fullTextSession.createFullTextQuery( ftQuery, BusLine.class );
+ int count = query.list().size();
+ tx.commit();
+ fullTextSession.close();
+ return count;
+ }
+
+ public int countBusLineByDatabaseCount() {
+ FullTextSession fullTextSession = Search.getFullTextSession( sessions.openSession() );
+ Transaction tx = fullTextSession.beginTransaction();
+ int count = fullTextSession.createCriteria( BusLine.class ).list().size();
+ tx.commit();
+ fullTextSession.close();
+ return count;
+ }
+
+ // Test setup options - Entities
+ @Override
+ protected Class<?>[] getMappings() {
+ return new Class[] { BusLine.class, BusStop.class };
+ }
+
+ // Test setup options - SessionFactory Properties
+ @Override
+ protected void configure(org.hibernate.cfg.Configuration configuration) {
+ super.configure( configuration );
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, SimpleAnalyzer.class.getName() );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/engine/RollbackTransactionTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/Document.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/Document.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/Document.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,103 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.fieldAccess;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Richard Hallier
+ */
+@Entity
+@Indexed(index = "DocumentField")
+public class Document {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ @Boost(2)
+ private String title;
+
+ @Field(name="Abstract", index=Index.TOKENIZED, store= Store.NO)
+ private String summary;
+
+ @Lob
+ @Field(index=Index.TOKENIZED, store=Store.NO)
+ private String text;
+
+ Document() {
+ }
+
+ public Document(String title, String summary, String text) {
+ super();
+ this.summary = summary;
+ this.text = text;
+ this.title = title;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ public String getText() {
+ return text;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/Document.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/FieldAccessTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/FieldAccessTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.fieldAccess;
+
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.apache.lucene.queryParser.QueryParser;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class FieldAccessTest extends SearchTestCase {
+
+ public void testFields() throws Exception {
+ Document doc = new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" );
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( doc );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.getFullTextSession(s);
+ tx = session.beginTransaction();
+ QueryParser p = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ List result = session.createFullTextQuery( p.parse( "Abstract:Hibernate" ) ).list();
+ assertEquals( "Query by field", 1, result.size() );
+ s.delete( result.get( 0 ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ public void testFieldBoost() throws Exception {
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist(
+ new Document( "Hibernate in Action", "Object and Relational", "blah blah blah" )
+ );
+ s.persist(
+ new Document( "Object and Relational", "Hibernate in Action", "blah blah blah" )
+ );
+ tx.commit();
+
+ s.clear();
+
+ FullTextSession session = Search.getFullTextSession(s);
+ tx = session.beginTransaction();
+ QueryParser p = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.standardAnalyzer );
+ List result = session.createFullTextQuery( p.parse( "title:Action OR Abstract:Action" ) ).list();
+ assertEquals( "Query by field", 2, result.size() );
+ assertEquals( "@Boost fails", "Hibernate in Action", ( (Document) result.get( 0 ) ).getTitle() );
+ s.delete( result.get( 0 ) );
+ tx.commit();
+ s.close();
+
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Document.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/fieldAccess/FieldAccessTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/AndDocIdSetsTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/AndDocIdSetsTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/AndDocIdSetsTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,265 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.util.DocIdBitSet;
+import org.hibernate.search.filter.AndDocIdSet;
+
+import junit.framework.TestCase;
+
+/**
+ * Functionality testcase for org.hibernate.search.filter.AndDocIdSet.
+ * A main method runs some very approximate performance
+ * comparisons against plain java.util.BitSet AND operations.
+ * The numbers show that AndDocIdSet should be used only when it's not
+ * possible to rely on a BitSet; in this class, however, we use BitSet
+ * as it's convenient for testing the implementation.
+ *
+ * @see AndDocIdSet
+ * @see BitSet
+ * @author Sanne Grinovero
+ */
+public class AndDocIdSetsTest extends TestCase {
+
+ static final List<Integer> testDataFrom0to9 = toImmutableList( 0,1,2,3,4,5,6,7,8,9 );
+ static final List<Integer> testDataFrom1to10 = toImmutableList( 1,2,3,4,5,6,7,8,9,10 );
+ static final List<Integer> testDataFrom1to9 = toImmutableList( 1,2,3,4,5,6,7,8,9 );
+
+ private static final List<Integer> toImmutableList(int... is) {
+ List<Integer> l = new ArrayList<Integer>( is.length );
+ for ( int i=0; i<is.length; i++ ) {
+ l.add( Integer.valueOf( is[i] ) );
+ }
+ return Collections.unmodifiableList( l );
+ }
+
+ @SuppressWarnings("unchecked")
+ List<Integer> andLists(List<Integer>... lists) {
+ if (lists.length==0) {
+ return Collections.EMPTY_LIST;
+ }
+ List<Integer> result = new ArrayList<Integer>( lists[0] );
+ for (int i=1; i<lists.length; i++) {
+ result.retainAll( lists[i] );
+ }
+ return result;
+ }
+
+ // auto-testing of test utility methods for AND operations on test arrays
+ @SuppressWarnings("unchecked")
+ public void testAndingArrays() {
+ List<Integer> andLists = andLists( testDataFrom0to9, testDataFrom1to10 );
+ assertTrue( andLists.containsAll( testDataFrom1to9 ) );
+ assertFalse( andLists.contains( Integer.valueOf( 0 ) ) );
+ assertFalse( andLists.contains( Integer.valueOf( 10 ) ) );
+ assertTrue( andLists.equals( testDataFrom1to9 ) );
+ DocIdSet docIdSet0_9 = arrayToDocIdSet(testDataFrom0to9);
+ DocIdSet docIdSet1_10 = arrayToDocIdSet(testDataFrom1to10);
+ DocIdSet docIdSet1_9 = arrayToDocIdSet(testDataFrom1to9);
+ assertTrue( docIdSetsEqual( docIdSet0_9, docIdSet0_9 ) );
+ assertTrue( docIdSetsEqual( docIdSet1_10, docIdSet1_10 ) );
+ assertFalse( docIdSetsEqual( docIdSet1_10, docIdSet1_9 ) );
+ assertFalse( docIdSetsEqual( docIdSet0_9, docIdSet1_9 ) );
+ }
+
+ // auto-testing of test utility methods for conversion in DocIdSetIterator
+ public void testIteratorMatchesTestArray() throws IOException {
+ DocIdSet docIdSet0_9 = arrayToDocIdSet(testDataFrom0to9);
+ DocIdSetIterator docIdSetIterator = docIdSet0_9.iterator();
+ assertTrue( docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS );
+ assertEquals( 0, docIdSetIterator.docID() );
+ assertEquals( 9, docIdSetIterator.advance(9) );
+ assertEquals( DocIdSetIterator.NO_MORE_DOCS, docIdSetIterator.advance(10) );
+ }
+
+ public void testAndDocIdSets() {
+ List<DocIdSet> filters = new ArrayList<DocIdSet>( 2 );
+ filters.add( arrayToDocIdSet( testDataFrom0to9 ) );
+ filters.add( arrayToDocIdSet( testDataFrom1to10 ) );
+ DocIdSet expected = arrayToDocIdSet( testDataFrom1to9 );
+ DocIdSet testedSet = new AndDocIdSet( filters, 10 );
+ assertTrue( docIdSetsEqual( expected, testedSet ) );
+ }
+
+ public void testOnRandomBigArrays(){
+ onRandomBigArraysTest( 13L );
+ onRandomBigArraysTest( 9L );
+ onRandomBigArraysTest( 71L );
+ }
+
+ public void onRandomBigArraysTest(long randomSeed) {
+ List<BitSet> filtersData = makeRandomBitSetList( randomSeed, 4, 1000000, 1500000 );
+ BitSet expectedBitset = applyANDOnBitSets( filtersData );
+ List<DocIdSet> filters = toDocIdSetList( filtersData );
+ DocIdBitSet expectedDocIdSet = new DocIdBitSet( expectedBitset );
+ DocIdSet testedSet = new AndDocIdSet( filters, 1500000 );
+ assertTrue( docIdSetsEqual(expectedDocIdSet, testedSet) );
+ }
+
+ private static List<DocIdSet> toDocIdSetList(List<BitSet> filtersData) {
+ List<DocIdSet> docIdSets = new ArrayList<DocIdSet>( filtersData.size() );
+ for (BitSet bitSet : filtersData) {
+ docIdSets.add ( new DocIdBitSet(bitSet) );
+ }
+ return docIdSets;
+ }
+
+ public static void main(String[] args) throws IOException {
+ compareAndingPerformance( 8, 1000000, 1500000 );
+ compareAndingPerformance( 4, 1000000, 1500000 );
+ compareAndingPerformance( 2, 1000000, 1500000 );
+ compareAndingPerformance( 2, 100000000, 150000000 );
+ compareAndingPerformance( 4, 100000000, 150000000 );
+ compareAndingPerformance( 8, 100000000, 150000000 );
+ }
+
+ private static void compareAndingPerformance(final int listSize,
+ final int minBitsSize, final int maxBitsSize) throws IOException {
+ List<BitSet> filtersData = makeRandomBitSetList( 13L, listSize, minBitsSize, maxBitsSize );
+ DocIdSet andedByBitsResult = null;
+ DocIdSet andedByIterationResult = null;
+ {
+ long startTime = System.currentTimeMillis();
+ for ( int i=0; i<1000; i++ ) {
+ BitSet expectedBitset = applyANDOnBitSets( filtersData );
+ andedByBitsResult = new DocIdBitSet( expectedBitset );
+ // iteration is needed to have a fair comparison with other impl:
+ iterateOnResults( andedByBitsResult );
+ }
+ long totalTimeMs = System.currentTimeMillis() - startTime;
+ System.out.println( "Time to \"AND " + listSize +
+ " BitSets and iterate on results\" 1000 times: " +
+ totalTimeMs + "ms. (" +
+ minBitsSize +" minimum BitSet size)");
+ }
+ List<DocIdSet> docIdSetList = toDocIdSetList( filtersData );
+ {
+ long startTime = System.currentTimeMillis();
+ for ( int i=0; i<1000; i++ ) {
+ andedByIterationResult = new AndDocIdSet( docIdSetList, maxBitsSize );
+ // iteration is needed because the AND is done lazily on iterator access:
+ iterateOnResults( andedByIterationResult );
+ }
+ long totalTimeMs = System.currentTimeMillis() - startTime;
+ System.out.println( "Time to \"use AndDocIdSet iterator on " + listSize +
+ " Filters and iterate on results\" 1000 times: " +
+ totalTimeMs + "ms. (" +
+ minBitsSize +" minimum BitSet size)");
+ }
+ System.out.println(" Results are same: " + docIdSetsEqual( andedByBitsResult, andedByIterationResult ) );
+ }
+
+ private static void iterateOnResults(DocIdSet docIdBitSet) throws IOException {
+ DocIdSetIterator iterator = docIdBitSet.iterator();
+ int currentDoc;
+ do {
+ currentDoc = iterator.nextDoc();
+ }
+ while ( currentDoc != DocIdSetIterator.NO_MORE_DOCS );
+ }
+
+ private static final BitSet applyANDOnBitSets(final List<BitSet> filtersData) {
+ BitSet andedBitSet = null;
+ for (BitSet bits : filtersData) {
+ if ( andedBitSet==null ) {
+ andedBitSet = (BitSet) bits.clone();
+ }
+ else {
+ andedBitSet.and( bits );
+ }
+ }
+ return andedBitSet;
+ }
+
+ private static List<BitSet> makeRandomBitSetList(final long randomSeed, final int listSize,
+ final int minBitsSize, final int maxBitsSize) {
+ Random r = new Random( randomSeed ); //have a fixed Seed for repeatable tests
+ List<BitSet> resultList = new ArrayList<BitSet>( listSize );
+ for (int i=0; i<listSize; i++) {
+ int arraySize = minBitsSize + r.nextInt( maxBitsSize-minBitsSize );
+ resultList.add( makeRandomBitSet( r, arraySize) );
+ }
+ return resultList;
+ }
+
+ private static BitSet makeRandomBitSet(final Random randomSource, final int maxSize){
+ BitSet bitSet = new BitSet();
+ for ( int datai=0; datai<maxSize; datai++ ) {
+ // each bit has a 50% chance of being set:
+ if ( randomSource.nextBoolean() ) bitSet.set( datai );
+ }
+ return bitSet;
+ }
+
+ /**
+ * Converts a list of Integers representing document ids
+ * into a Lucene DocIdSet.
+ * @param docIdList the document ids the set should contain
+ * @return a DocIdSet containing exactly the given ids
+ */
+ public DocIdSet arrayToDocIdSet(List<Integer> docIdList) {
+ BitSet bitset = new BitSet();
+ for (int i : docIdList) {
+ bitset.set(i);
+ }
+ return new DocIdBitSet(bitset);
+ }
+
+ /**
+ * @param expected the reference DocIdSet
+ * @param tested the DocIdSet being verified
+ * @return true if the two DocIdSets are equal: they contain the same ids, in the same order
+ */
+ public static final boolean docIdSetsEqual(DocIdSet expected, DocIdSet tested) {
+ try{
+ DocIdSetIterator iterA = expected.iterator();
+ DocIdSetIterator iterB = tested.iterator();
+ int nextA;
+ int nextB;
+ do {
+ nextA = iterA.nextDoc();
+ nextB = iterB.nextDoc();
+ if ( nextA != nextB ) {
+ return false;
+ }
+ assertEquals( iterA.docID(), iterB.docID() );
+ } while ( nextA != DocIdSetIterator.NO_MORE_DOCS );
+ }
+ catch (IOException ioe) {
+ fail( "these DocIdSetIterator instances should not throw any exceptions" );
+ }
+ return true;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/AndDocIdSetsTest.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
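AndDocIdSet itself is not included in this diff; the tests above exercise it only through its DocIdSet contract. As a rough sketch of the iterator-based AND technique such a class relies on (the class and method names below are illustrative assumptions, not the committed implementation):

    import java.io.IOException;
    import java.util.List;

    import org.apache.lucene.search.DocIdSet;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.util.OpenBitSet;

    class IteratorAndSketch {

        // ANDs several DocIdSets (at least one is assumed) by leap-frogging all
        // iterators to a common document id; every id on which all iterators
        // agree is collected into an OpenBitSet.
        static OpenBitSet and(List<DocIdSet> sets, int maxDocs) throws IOException {
            OpenBitSet result = new OpenBitSet( maxDocs );
            DocIdSetIterator[] iterators = new DocIdSetIterator[ sets.size() ];
            for ( int i = 0; i < iterators.length; i++ ) {
                iterators[i] = sets.get( i ).iterator();
            }
            int candidate = iterators[0].nextDoc();
            while ( candidate != DocIdSetIterator.NO_MORE_DOCS ) {
                boolean allMatch = true;
                for ( int i = 1; i < iterators.length; i++ ) {
                    int current = iterators[i].docID();
                    if ( current < candidate ) {
                        current = iterators[i].advance( candidate );
                    }
                    if ( current == DocIdSetIterator.NO_MORE_DOCS ) {
                        return result;
                    }
                    if ( current != candidate ) {
                        // this iterator jumped past the candidate: restart the scan from its position
                        candidate = iterators[0].advance( current );
                        allMatch = false;
                        break;
                    }
                }
                if ( allMatch ) {
                    result.set( candidate );
                    candidate = iterators[0].nextDoc();
                }
            }
            return result;
        }
    }

The main() benchmark above illustrates exactly this trade-off: the iterator leap-frog is slower than a plain BitSet AND, but it works for DocIdSet implementations that do not expose their underlying bits.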
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/BestDriversFilter.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/BestDriversFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/BestDriversFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,49 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.util.OpenBitSet;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class BestDriversFilter extends Filter {
+
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );
+ TermDocs termDocs = reader.termDocs( new Term( "score", "5" ) );
+ while ( termDocs.next() ) {
+ bitSet.set( termDocs.doc() );
+ }
+ return bitSet;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/BestDriversFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Driver.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Driver.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Driver.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,129 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.util.Date;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.FullTextFilterDefs;
+import org.hibernate.search.annotations.FilterCacheModeType;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@FullTextFilterDefs( {
+ @FullTextFilterDef(name = "bestDriver", impl = BestDriversFilter.class, cache = FilterCacheModeType.NONE), //actual Filter implementation
+ @FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class, cache = FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS), //Filter factory with parameters
+ @FullTextFilterDef(name = "cacheresultstest", impl = ExcludeAllFilterFactory.class, cache = FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS),
+ @FullTextFilterDef(name = "cacheinstancetest", impl = InstanceBasedExcludeAllFilter.class, cache = FilterCacheModeType.INSTANCE_ONLY)
+})
+public class Driver {
+ @Id
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String name;
+ @Field(index= Index.UN_TOKENIZED)
+ private String teacher;
+ @Field(index= Index.UN_TOKENIZED)
+ private int score;
+ @Field(index= Index.UN_TOKENIZED)
+ @DateBridge( resolution = Resolution.YEAR)
+ private Date delivery;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getTeacher() {
+ return teacher;
+ }
+
+ public void setTeacher(String teacher) {
+ this.teacher = teacher;
+ }
+
+ public int getScore() {
+ return score;
+ }
+
+ public void setScore(int score) {
+ this.score = score;
+ }
+
+ public Date getDelivery() {
+ return delivery;
+ }
+
+ public void setDelivery(Date delivery) {
+ this.delivery = delivery;
+ }
+
+ public boolean equals(Object o) {
+ if ( this == o ) return true;
+ if ( o == null || getClass() != o.getClass() ) return false;
+
+ Driver driver = (Driver) o;
+
+ if ( id != driver.id ) return false;
+ if ( score != driver.score ) return false;
+ if ( delivery != null ? !delivery.equals( driver.delivery ) : driver.delivery != null ) return false;
+ if ( name != null ? !name.equals( driver.name ) : driver.name != null ) return false;
+ return !( teacher != null ? !teacher.equals( driver.teacher ) : driver.teacher != null );
+
+ }
+
+ public int hashCode() {
+ int result;
+ result = id;
+ result = 31 * result + ( name != null ? name.hashCode() : 0 );
+ result = 31 * result + ( teacher != null ? teacher.hashCode() : 0 );
+ result = 31 * result + score;
+ result = 31 * result + ( delivery != null ? delivery.hashCode() : 0 );
+ return result;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Driver.java
___________________________________________________________________
Name: svn:keywords
+ Id
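SecurityFilterFactory, referenced by the "security" filter definition above, is not part of this change-set. A minimal sketch of a parameterized filter factory compatible with enableFullTextFilter( "security" ).setParameter( "login", ... ) could look like the following; the "teacher" field and all internals are assumptions consistent with the FilterTest expectations, not the committed source:

    package org.hibernate.search.test.filter;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.CachingWrapperFilter;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.TermQuery;

    import org.hibernate.search.annotations.Factory;
    import org.hibernate.search.annotations.Key;
    import org.hibernate.search.filter.FilterKey;
    import org.hibernate.search.filter.StandardFilterKey;

    public class SecurityFilterFactory {

        private String login;

        // Hibernate Search injects the value passed via setParameter( "login", ... )
        public void setLogin(String login) {
            this.login = login;
        }

        @Key
        public FilterKey getKey() {
            StandardFilterKey key = new StandardFilterKey();
            key.addParameter( login );
            return key;
        }

        @Factory
        public Filter getFilter() {
            // restrict results to drivers taught by the given login
            TermQuery query = new TermQuery( new Term( "teacher", login ) );
            return new CachingWrapperFilter( new QueryWrapperFilter( query ) );
        }
    }

The @Key method matters here because the filter is declared with FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS, so cached instances must be distinguishable by their parameter values.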
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilter.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.index.IndexReader;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ExcludeAllFilter extends Filter implements Serializable {
+
+ // ugly but useful for test purposes
+ private static volatile boolean done = false;
+
+ @Override
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
+ done = true;
+ return DocIdSet.EMPTY_DOCIDSET;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilterFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilterFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilterFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import org.apache.lucene.search.Filter;
+import org.hibernate.search.annotations.Factory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ExcludeAllFilterFactory {
+
+ @Factory
+ public Filter getFilter() {
+ return new ExcludeAllFilter();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/ExcludeAllFilterFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FilterTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FilterTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FilterTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,213 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.util.Calendar;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermRangeFilter;
+import org.hibernate.Session;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class FilterTest extends SearchTestCase {
+
+ public void testNamedFilters() {
+ createData();
+ FullTextSession s = Search.getFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term("teacher", "andre") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "max") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "aaron") ), BooleanClause.Occur.SHOULD );
+ FullTextQuery ftQuery = s.createFullTextQuery( query, Driver.class );
+ assertEquals("No filter should happen", 3, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.disableFullTextFilter( "bestDriver" ); //was not enabled, but should be harmless
+ ftQuery.enableFullTextFilter( "bestDriver" );
+ assertEquals("Should filter out Gavin", 2, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ assertEquals("Should filter to limit to Emmanuel", 1, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ ftQuery.disableFullTextFilter( "security");
+ ftQuery.disableFullTextFilter( "bestDriver");
+ assertEquals("Should not filter anymore", 3, ftQuery.getResultSize() );
+
+ s.getTransaction().commit();
+ s.close();
+ deleteData();
+ }
+
+ public void testCache() {
+ createData();
+ FullTextSession s = Search.getFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term("teacher", "andre") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "max") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "aaron") ), BooleanClause.Occur.SHOULD );
+ FullTextQuery ftQuery = s.createFullTextQuery( query, Driver.class );
+ assertEquals("No filter should happen", 3, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "cacheresultstest");
+ assertEquals("Should filter out all", 0, ftQuery.getResultSize() );
+
+ // HSEARCH-174 - we call System.gc() to force a garbage collection.
+ // Prior to the fix for HSEARCH-174 this would cause the filter to be
+ // garbage collected since Lucene used weak references.
+ System.gc();
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "cacheresultstest");
+ try {
+ ftQuery.getResultSize();
+ }
+ catch (IllegalStateException e) {
+ fail("Cache results does not work");
+ }
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "cacheinstancetest");
+ assertEquals("Should filter out all", 0, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "cacheinstancetest");
+ try {
+ ftQuery.getResultSize();
+ fail("Cache instance does not work");
+ }
+ catch (IllegalStateException e) {
+ //success
+ }
+
+ s.getTransaction().commit();
+ s.close();
+ deleteData();
+ }
+
+ public void testStraightFilters() {
+ createData();
+ FullTextSession s = Search.getFullTextSession( openSession( ) );
+ s.getTransaction().begin();
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term("teacher", "andre") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "max") ), BooleanClause.Occur.SHOULD );
+ query.add( new TermQuery( new Term("teacher", "aaron") ), BooleanClause.Occur.SHOULD );
+ FullTextQuery ftQuery;
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ Filter dateFilter = new TermRangeFilter("delivery", "2001", "2005", true, true);
+ ftQuery.setFilter( dateFilter );
+ assertEquals("Should select only liz", 1, ftQuery.getResultSize() );
+
+ ftQuery = s.createFullTextQuery( query, Driver.class );
+ ftQuery.setFilter( dateFilter );
+ ftQuery.enableFullTextFilter( "bestDriver");
+ ftQuery.enableFullTextFilter( "security").setParameter( "login", "andre" );
+ ftQuery.disableFullTextFilter( "security");
+ ftQuery.disableFullTextFilter( "bestDriver");
+ ftQuery.setFilter( null );
+ assertEquals("Should not filter anymore", 3, ftQuery.getResultSize() );
+
+ s.getTransaction().commit();
+ s.close();
+ deleteData();
+ }
+
+
+ private void deleteData() {
+ Session s = openSession( );
+ s.getTransaction().begin();
+ s.createQuery( "delete " + Driver.class.getName() + " t").executeUpdate();
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ private void createData() {
+ Session s = openSession( );
+ s.getTransaction().begin();
+ Calendar cal = Calendar.getInstance();
+ cal.set( 2006, 10, 11);
+ Driver driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 1 );
+ driver.setName( "Emmanuel" );
+ driver.setScore( 5 );
+ driver.setTeacher( "andre" );
+ s.persist( driver );
+
+ cal.set( 2007, 10, 11);
+ driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 2 );
+ driver.setName( "Gavin" );
+ driver.setScore( 3 );
+ driver.setTeacher( "aaron" );
+ s.persist( driver );
+
+ cal.set( 2004, 10, 11);
+ driver = new Driver();
+ driver.setDelivery( cal.getTime() );
+ driver.setId( 3 );
+ driver.setName( "Liz" );
+ driver.setScore( 5 );
+ driver.setTeacher( "max" );
+ s.persist( driver );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Driver.class,
+ Soap.class
+ };
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure(cfg);
+ cfg.setProperty( "hibernate.search.filter.cache_docidresults.size", "10" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FilterTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FiltersOptimizationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FiltersOptimizationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FiltersOptimizationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,202 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.List;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.util.DocIdBitSet;
+import org.apache.lucene.util.OpenBitSet;
+import org.hibernate.search.filter.FilterOptimizationHelper;
+
+import junit.framework.TestCase;
+
+/**
+ * Used to test org.hibernate.search.filter.FilterOptimizationHelper.
+ * @see org.hibernate.search.filter.FilterOptimizationHelper
+ * @author Sanne Grinovero
+ */
+public class FiltersOptimizationTest extends TestCase {
+
+ /**
+ * In some cases optimizations are not possible;
+ * verify that mergeByBitAnds returns the same instance
+ * in that case.
+ */
+ public void testSkipMerging() {
+ List<DocIdSet> dataIn = new ArrayList<DocIdSet>( 3 );
+ dataIn.add( makeOpenBitSetTestSet( 1,2,3,5,8,9,10,11 ) );
+ dataIn.add( makeBitSetTestSet( 1,2,3,5,8,9,10,11,20 ) );
+ dataIn.add( makeAnonymousTestSet( 1,2,3,5,8,9,10,11 ) );
+ dataIn.add( makeAnonymousTestSet( 1,2,3,5,8,9,10,11,12 ) );
+ List<DocIdSet> merge = FilterOptimizationHelper.mergeByBitAnds( dataIn );
+ assertSame( dataIn, merge );
+ }
+
+ /**
+ * In case two filters are of OpenBitSet implementation,
+ * they should be AND-ed by using bit operations
+ * (rather than building the iterator).
+ * @throws IOException should not be thrown
+ */
+ public void testDoMergingOnOpenBitSet() throws IOException {
+ List<DocIdSet> dataIn = new ArrayList<DocIdSet>( 3 );
+ dataIn.add( makeOpenBitSetTestSet( 1,2,5,8,9,10,11 ) );
+ dataIn.add( makeOpenBitSetTestSet( 1,2,3,5,8,11 ) );
+ DocIdSet unmergedSet = makeAnonymousTestSet( 1,2,3,5,8,9,10,11 );
+ dataIn.add( unmergedSet );
+ List<DocIdSet> merge = FilterOptimizationHelper.mergeByBitAnds( dataIn );
+ assertNotSame( dataIn, merge );
+
+ assertEquals( 2, merge.size() );
+ assertSame( unmergedSet, merge.get( 0 ) );
+ assertTrue( isIdSetSequenceSameTo( merge.get( 1 ), 1,2,5,8,11 ) );
+ }
+
+ /**
+ * In case two filters are backed by a java.util.BitSet (DocIdBitSet),
+ * they should be AND-ed by using bit operations
+ * (rather than building the iterator).
+ * @throws IOException should not be thrown
+ */
+ public void testDoMergingOnJavaBitSet() throws IOException {
+ List<DocIdSet> dataIn = new ArrayList<DocIdSet>( 3 );
+ dataIn.add( makeBitSetTestSet( 1,2,5,8,9,10,11 ) );
+ dataIn.add( makeBitSetTestSet( 1,2,3,5,8,11 ) );
+ DocIdSet unmergedSet = makeAnonymousTestSet( 1,2,3,5,8,9,10,11 );
+ dataIn.add( unmergedSet );
+ List<DocIdSet> merge = FilterOptimizationHelper.mergeByBitAnds( dataIn );
+ assertNotSame( dataIn, merge );
+
+ assertEquals( 2, merge.size() );
+ assertSame( unmergedSet, merge.get( 0 ) );
+ assertTrue( isIdSetSequenceSameTo( merge.get( 1 ), 1,2,5,8,11 ) );
+ }
+
+ /**
+ * Used to test this testcase's own helper method isIdSetSequenceSameTo.
+ * @throws IOException should not be thrown
+ */
+ public void testSelfIdSequenceTester() throws IOException {
+ assertTrue( isIdSetSequenceSameTo(
+ makeOpenBitSetTestSet( 1,2,3,5,8,11 ),
+ 1,2,3,5,8,11 ) );
+ assertFalse( isIdSetSequenceSameTo(
+ makeOpenBitSetTestSet( 1,2,3,5,8 ),
+ 1,2,3,5,8,11 ) );
+ assertFalse( isIdSetSequenceSameTo(
+ makeOpenBitSetTestSet( 1,2,3,5,8,11 ),
+ 1,2,3,5,8 ) );
+ }
+
+ /**
+ * Verifies whether the docIdSet represents a specific
+ * sequence of docIds.
+ * @param docIdSet the docIdSet to test
+ * @param expectedIds an array of document ids
+ * @return true if iterating on docIdSet returns the expectedIds
+ * @throws IOException should not happen
+ */
+ private boolean isIdSetSequenceSameTo(DocIdSet docIdSet, int...expectedIds) throws IOException {
+ DocIdSetIterator idSetIterator = docIdSet.iterator();
+ for ( int setBit : expectedIds ) {
+ int currentId = idSetIterator.nextDoc();
+ if ( currentId == DocIdSetIterator.NO_MORE_DOCS ) {
+ return false;
+ }
+ if ( currentId != setBit ) {
+ return false;
+ }
+ }
+ // and now test both sequences are at the end:
+ return idSetIterator.nextDoc() == DocIdSetIterator.NO_MORE_DOCS;
+ }
+
+ /**
+ * test helper, makes a DocIdSet implementation whose concrete type is hidden
+ * @param docIds the ids it should contain
+ * @return a DocIdSet containing the given ids, wrapped so its concrete type cannot be detected
+ */
+ private DocIdSet makeAnonymousTestSet(int... docIds) {
+ DocIdSet idSet = makeOpenBitSetTestSet( docIds );
+ return new DocIdSetHiddenType( idSet );
+ }
+
+ /**
+ * test helper, makes a prefilled OpenBitSet
+ * @param enabledBits the ids it should contain
+ * @return a new OpenBitSet
+ */
+ private OpenBitSet makeOpenBitSetTestSet(int... enabledBits) {
+ OpenBitSet set = new OpenBitSet();
+ for (int position : enabledBits ) {
+ // a minimal check for input duplicates:
+ assertFalse( set.get( position ) );
+ set.set( position );
+ }
+ return set;
+ }
+
+ /**
+ * test helper, makes a prefilled DocIdBitSet
+ * using a java.util.BitSet
+ * @see java.util.BitSet
+ * @param enabledBits the ids it should contain
+ * @return a new DocIdBitSet
+ */
+ private DocIdBitSet makeBitSetTestSet(int... enabledBits) {
+ BitSet set = new BitSet();
+ for (int position : enabledBits ) {
+ // a minimal check for input duplicates:
+ assertFalse( set.get( position ) );
+ set.set( position );
+ }
+ return new DocIdBitSet( set );
+ }
+
+ /**
+ * Implementation for testing: wraps a DocIdSet with a new type
+ * to make it not possible to cast/detect to the original type.
+ */
+ private static class DocIdSetHiddenType extends DocIdSet {
+
+ private final DocIdSet bitSet;
+
+ DocIdSetHiddenType(DocIdSet wrapped) {
+ this.bitSet = wrapped;
+ }
+
+ @Override
+ public DocIdSetIterator iterator() throws IOException {
+ return bitSet.iterator();
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/FiltersOptimizationTest.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
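
As an aside, a minimal sketch of how the FilterOptimizationHelper API exercised by the test above can be invoked; the class and variable names below are illustrative only and are not part of this commit, and the expected outcome is inferred from the assertions in FiltersOptimizationTest:

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.util.OpenBitSet;

import org.hibernate.search.filter.FilterOptimizationHelper;

public class MergeByBitAndsSketch {
    public static void main(String[] args) {
        OpenBitSet first = new OpenBitSet();
        first.set( 1 );
        first.set( 5 );
        OpenBitSet second = new OpenBitSet();
        second.set( 5 );
        second.set( 9 );
        List<DocIdSet> sets = new ArrayList<DocIdSet>( 2 );
        sets.add( first );
        sets.add( second );
        // bit-set backed DocIdSets are AND-ed via bit operations instead of iterating;
        // sets of unknown implementation would be returned unchanged
        List<DocIdSet> reduced = FilterOptimizationHelper.mergeByBitAnds( sets );
        // following the behaviour asserted above, 'reduced' should hold a single
        // OpenBitSet in which only document id 5 remains set
        System.out.println( reduced.size() );
    }
}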
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,50 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.DocIdSet;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class InstanceBasedExcludeAllFilter extends Filter implements Serializable {
+
+ private volatile boolean done = false;
+
+ @Override
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
+ done = true;
+ return DocIdSet.EMPTY_DOCIDSET;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/SecurityFilterFactory.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/SecurityFilterFactory.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/SecurityFilterFactory.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.TermQuery;
+
+import org.hibernate.search.annotations.Factory;
+import org.hibernate.search.annotations.Key;
+import org.hibernate.search.filter.FilterKey;
+import org.hibernate.search.filter.StandardFilterKey;
+
+/**
+ * Apply a security filter to the results
+ *
+ * @author Emmanuel Bernard
+ */
+public class SecurityFilterFactory {
+ private String login;
+
+ /**
+ * injected parameter
+ */
+ public void setLogin(String login) {
+ this.login = login;
+ }
+
+ @Key
+ public FilterKey getKey() {
+ StandardFilterKey key = new StandardFilterKey();
+ key.addParameter( login );
+ return key;
+ }
+
+ @Factory
+ public Filter getFilter() {
+ Query query = new TermQuery( new Term("teacher", login) );
+ return new QueryWrapperFilter(query);
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/SecurityFilterFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
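
The factory above is only one half of the wiring: a filter factory is referenced from a @FullTextFilterDef and enabled by name at query time, which is how the login parameter reaches setLogin(). A hedged sketch follows; the Lecture entity, the filter name "security" and the parameter value are assumptions for illustration, not taken from this commit:

import java.util.List;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.apache.lucene.search.Query;

import org.hibernate.Session;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.FullTextFilterDef;
import org.hibernate.search.annotations.Indexed;

@Entity
@Indexed
@FullTextFilterDef(name = "security", impl = SecurityFilterFactory.class)
class Lecture {
    @Id @DocumentId
    Long id;

    @Field
    String teacher; // the field targeted by the factory's TermQuery
}

class SecurityFilterUsage {
    List runAsTeacher(Session session, Query luceneQuery, String login) {
        FullTextSession fullTextSession = Search.getFullTextSession( session );
        FullTextQuery query = fullTextSession.createFullTextQuery( luceneQuery, Lecture.class );
        // enabling the named filter injects the parameter into SecurityFilterFactory.setLogin(),
        // then @Key builds the cache key and @Factory produces the Lucene Filter
        query.enableFullTextFilter( "security" ).setParameter( "login", login );
        return query.list();
    }
}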
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Soap.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Soap.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Soap.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.filter;
+
+import javax.persistence.Id;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class Soap {
+ @Id @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String perfume;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getPerfume() {
+ return perfume;
+ }
+
+ public void setPerfume(String perfume) {
+ this.perfume = perfume;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/filter/Soap.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Animal.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Animal.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Animal.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed(index = "Animal")
+public class Animal {
+ @Id @GeneratedValue
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Animal.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/EmbeddedIdTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/EmbeddedIdTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/EmbeddedIdTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,120 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import java.util.List;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class EmbeddedIdTest extends SearchTestCase {
+ public void testFieldBridge() throws Exception {
+ PersonPK emmanuelPk = new PersonPK();
+ emmanuelPk.setFirstName( "Emmanuel" );
+ emmanuelPk.setLastName( "Bernard" );
+ Person emmanuel = new Person();
+ emmanuel.setFavoriteColor( "Blue" );
+ emmanuel.setId( emmanuelPk );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.save( emmanuel );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ List results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "id.lastName", "Bernard" ) )
+ ).list();
+ assertEquals( 1, results.size() );
+ emmanuel = ( Person ) results.get( 0 );
+ emmanuel.setFavoriteColor( "Red" );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "id.lastName", "Bernard" ) )
+ ).list();
+ assertEquals( 1, results.size() );
+ emmanuel = ( Person ) results.get( 0 );
+ assertEquals( "Red", emmanuel.getFavoriteColor() );
+ s.delete( results.get( 0 ) );
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * HSEARCH-306, HSEARCH-248
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testSafeFromTupleId() throws Exception {
+ PersonPK emmanuelPk = new PersonPK();
+ emmanuelPk.setFirstName( "Emmanuel" );
+ emmanuelPk.setLastName( "Bernard" );
+ Person emmanuel = new Person();
+ emmanuel.setFavoriteColor( "Blue" );
+ emmanuel.setId( emmanuelPk );
+
+ PersonPK johnPk = new PersonPK();
+ johnPk.setFirstName( "John" );
+ johnPk.setLastName( "Doe" );
+ Person john = new Person();
+ john.setFavoriteColor( "Blue" );
+ john.setId( johnPk );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.save( emmanuel );
+ s.save( john );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ // we need a query which returns at least two results.
+ List results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "favoriteColor", "blue" ) )
+ ).list();
+ assertEquals( 2, results.size() );
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Person.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/EmbeddedIdTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/ImplicitIdTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/ImplicitIdTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/ImplicitIdTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import java.util.List;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class ImplicitIdTest extends SearchTestCase {
+
+ /**
+ * Tests that @DocumentId is optional. See HSEARCH-104.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testImplicitDocumentId() throws Exception {
+ Animal dog = new Animal();
+ dog.setName( "Dog" );
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.save( dog );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ List results = Search.getFullTextSession( s ).createFullTextQuery(
+ new TermQuery( new Term( "name", "dog" ) )
+ ).list();
+ assertEquals( 1, results.size() );
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Animal.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/ImplicitIdTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Person.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Person.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Person.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import javax.persistence.Entity;
+import javax.persistence.EmbeddedId;
+
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Person {
+ @EmbeddedId
+ @FieldBridge(impl = PersonPKBridge.class)
+ @DocumentId
+ private PersonPK id;
+ private String favoriteColor;
+
+ public PersonPK getId() {
+ return id;
+ }
+
+ public void setId(PersonPK id) {
+ this.id = id;
+ }
+
+ @Field
+ public String getFavoriteColor() {
+ return favoriteColor;
+ }
+
+ public void setFavoriteColor(String favoriteColor) {
+ this.favoriteColor = favoriteColor;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/Person.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPK.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPK.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPK.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,72 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import java.io.Serializable;
+import javax.persistence.Embeddable;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Embeddable
+public class PersonPK implements Serializable {
+ private String firstName;
+ private String lastName;
+
+ public String getLastName() {
+ return lastName;
+ }
+
+ public void setLastName(String lastName) {
+ this.lastName = lastName;
+ }
+
+ public String getFirstName() {
+ return firstName;
+ }
+
+ public void setFirstName(String firstName) {
+ this.firstName = firstName;
+ }
+
+ public boolean equals(Object o) {
+ if ( this == o ) return true;
+ if ( !( o instanceof PersonPK ) ) return false;
+
+ PersonPK personPK = (PersonPK) o;
+
+ if ( firstName != null ? !firstName.equals( personPK.firstName ) : personPK.firstName != null ) return false;
+ if ( lastName != null ? !lastName.equals( personPK.lastName ) : personPK.lastName != null ) return false;
+
+ return true;
+ }
+
+ public int hashCode() {
+ int result;
+ result = ( firstName != null ? firstName.hashCode() : 0 );
+ result = 31 * result + ( lastName != null ? lastName.hashCode() : 0 );
+ return result;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPK.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPKBridge.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPKBridge.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPKBridge.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+import org.hibernate.search.bridge.LuceneOptions;
+import org.hibernate.search.bridge.TwoWayFieldBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class PersonPKBridge implements TwoWayFieldBridge {
+
+ public Object get(String name, Document document) {
+ PersonPK id = new PersonPK();
+ Field field = document.getField( name + ".firstName" );
+ id.setFirstName( field.stringValue() );
+ field = document.getField( name + ".lastName" );
+ id.setLastName( field.stringValue() );
+ return id;
+ }
+
+ public String objectToString(Object object) {
+ PersonPK id = ( PersonPK ) object;
+ StringBuilder sb = new StringBuilder();
+ sb.append( id.getFirstName() ).append( " " ).append( id.getLastName() );
+ return sb.toString();
+ }
+
+ public void set(String name, Object value, Document document, LuceneOptions luceneOptions) {
+ PersonPK id = ( PersonPK ) value;
+
+ //store each property in a unique field
+ Field field = new Field(
+ name + ".firstName",
+ id.getFirstName(),
+ luceneOptions.getStore(),
+ luceneOptions.getIndex(),
+ luceneOptions.getTermVector()
+ );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+
+ field = new Field(
+ name + ".lastName",
+ id.getLastName(),
+ luceneOptions.getStore(),
+ luceneOptions.getIndex(),
+ luceneOptions.getTermVector()
+ );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+
+ //store the unique string representation in the named field
+ field = new Field(
+ name,
+ objectToString( id ),
+ luceneOptions.getStore(),
+ luceneOptions.getIndex(),
+ luceneOptions.getTermVector()
+ );
+ field.setBoost( luceneOptions.getBoost() );
+ document.add( field );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/PersonPKBridge.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ManualTransactionContext.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ManualTransactionContext.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ManualTransactionContext.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id.providedId;
+
+import java.util.List;
+import java.util.ArrayList;
+import javax.transaction.Synchronization;
+import javax.transaction.Status;
+
+import org.hibernate.search.backend.TransactionContext;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ManualTransactionContext implements TransactionContext {
+ private boolean progress = true;
+ private List<Synchronization> syncs = new ArrayList<Synchronization>();
+
+ public boolean isTransactionInProgress() {
+ return progress;
+ }
+
+ public Object getTransactionIdentifier() {
+ return this;
+ }
+
+ public void registerSynchronization(Synchronization synchronization) {
+ syncs.add(synchronization);
+ }
+
+ public void end() {
+ this.progress = false;
+ for (Synchronization sync : syncs) {
+ sync.beforeCompletion();
+ }
+
+ for (Synchronization sync : syncs) {
+ sync.afterCompletion( Status.STATUS_COMMITTED );
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ManualTransactionContext.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPerson.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPerson.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPerson.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,81 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id.providedId;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.ProvidedId;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.search.bridge.builtin.LongBridge;
+
+import java.io.Serializable;
+
+
+/**
+ * @author Navin Surtani
+ */
+//@Entity
+@ProvidedId(bridge = @FieldBridge(impl = LongBridge.class) )
+@Indexed
+public class ProvidedIdPerson implements Serializable {
+
+// @Id
+// @GeneratedValue
+ private long id;
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ private String name;
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ private String blurb;
+ @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ private int age;
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getBlurb() {
+ return blurb;
+ }
+
+ public void setBlurb(String blurb) {
+ this.blurb = blurb;
+ }
+
+ public int getAge() {
+ return age;
+ }
+
+ public void setAge(int age) {
+ this.age = age;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPerson.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPersonSub.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPersonSub.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPersonSub.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,34 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id.providedId;
+
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Navin Surtani (<a href="mailto:nsurtani@redhat.com">nsurtani@redhat.com</a>)
+ */
+@Indexed
+public class ProvidedIdPersonSub extends ProvidedIdPerson {
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdPersonSub.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,84 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id.providedId;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
+import org.hibernate.search.backend.Work;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Navin Surtani
+ */
+public class ProvidedIdTest extends junit.framework.TestCase {
+
+ public void testProvidedId() throws Exception {
+ SearchFactoryImplementor sf = new SearchFactoryImpl( new StandaloneConf() );
+
+ ProvidedIdPerson person1 = new ProvidedIdPerson();
+ person1.setName( "Big Goat" );
+ person1.setBlurb( "Eats grass" );
+
+ ProvidedIdPerson person2 = new ProvidedIdPerson();
+ person2.setName( "Mini Goat" );
+ person2.setBlurb( "Eats cheese" );
+
+ ProvidedIdPersonSub person3 = new ProvidedIdPersonSub();
+ person3.setName( "Regular goat" );
+ person3.setBlurb( "Is anorexic" );
+
+ ManualTransactionContext tc = new ManualTransactionContext();
+
+ Work<ProvidedIdPerson> work = new Work<ProvidedIdPerson>( person1, 1, WorkType.INDEX );
+ sf.getWorker().performWork( work, tc );
+ work = new Work<ProvidedIdPerson>( person2, 2, WorkType.INDEX );
+ sf.getWorker().performWork( work, tc );
+ Work<ProvidedIdPersonSub> work2 = new Work<ProvidedIdPersonSub>( person3, 3, WorkType.INDEX );
+ sf.getWorker().performWork( work2, tc );
+
+ tc.end();
+
+ QueryParser parser = new QueryParser( SearchTestCase.getTargetLuceneVersion(), "name", SearchTestCase.standardAnalyzer );
+ Query luceneQuery = parser.parse( "Goat" );
+
+ //we cannot use FullTextQuery because @ProvidedId does not expose an id getter and the Hibernate Search query
+ //extension needs it. So we use plain Lucene
+
+ //we know there is only one DirectoryProvider
+ DirectoryProvider provider = sf
+ .getDirectoryProviders( ProvidedIdPerson.class )[0];
+ IndexSearcher searcher = new IndexSearcher( provider.getDirectory(), true );
+ TopDocs hits = searcher.search( luceneQuery, 1000 );
+ assertEquals( 3, hits.totalHits );
+ searcher.close();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/StandaloneConf.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/StandaloneConf.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/StandaloneConf.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,82 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.id.providedId;
+
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+
+import org.hibernate.search.cfg.SearchConfiguration;
+import org.hibernate.search.cfg.SearchMapping;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.Environment;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class StandaloneConf implements SearchConfiguration {
+ final Map<String,Class<?>> classes;
+ final Properties properties;
+
+ public StandaloneConf() {
+ classes = new HashMap<String,Class<?>>(2);
+ classes.put( ProvidedIdPerson.class.getName(), ProvidedIdPerson.class );
+ classes.put( ProvidedIdPersonSub.class.getName(), ProvidedIdPersonSub.class );
+
+ properties = new Properties( );
+ properties.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ properties.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ properties.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
+ properties.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
+ }
+
+ public Iterator<Class<?>> getClassMappings() {
+ return classes.values().iterator();
+ }
+
+ public Class<?> getClassMapping(String name) {
+ return classes.get( name );
+ }
+
+ public String getProperty(String propertyName) {
+ return properties.getProperty( propertyName );
+ }
+
+ public Properties getProperties() {
+ return properties;
+ }
+
+ public ReflectionManager getReflectionManager() {
+ return null;
+ }
+
+ public SearchMapping getProgrammaticMapping() {
+ return null;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/id/providedId/StandaloneConf.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/ManualIndexingStrategyTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/ManualIndexingStrategyTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/ManualIndexingStrategyTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,92 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.indexingStrategy;
+
+import org.apache.lucene.index.IndexReader;
+
+import org.hibernate.Session;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.test.AlternateDocument;
+import org.hibernate.search.test.Document;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.Environment;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ManualIndexingStrategyTest extends SearchTestCase {
+
+ public void testMultipleEntitiesPerIndex() throws Exception {
+
+ Session s = getSessions().openSession();
+ s.getTransaction().begin();
+ Document document =
+ new Document( "Hibernate in Action", "Object/relational mapping with Hibernate", "blah blah blah" );
+ s.persist( document );
+ s.flush();
+ s.persist(
+ new AlternateDocument(
+ document.getId(),
+ "Hibernate in Action",
+ "Object/relational mapping with Hibernate",
+ "blah blah blah"
+ )
+ );
+ s.getTransaction().commit();
+ s.close();
+
+ assertEquals( 0, getDocumentNbr() );
+
+ s = getSessions().openSession();
+ s.getTransaction().begin();
+ s.delete( s.get( AlternateDocument.class, document.getId() ) );
+ s.delete( s.createCriteria( Document.class ).uniqueResult() );
+ s.getTransaction().commit();
+ s.close();
+ }
+
+ private int getDocumentNbr() throws Exception {
+ IndexReader reader = IndexReader.open( getDirectory( Document.class ), false );
+ try {
+ return reader.numDocs();
+ }
+ finally {
+ reader.close();
+ }
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Document.class,
+ AlternateDocument.class
+ };
+ }
+
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.INDEXING_STRATEGY, "manual" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/indexingStrategy/ManualIndexingStrategyTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Animal.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Animal.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Animal.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public abstract class Animal extends Being {
+ private Long id;
+ private String name;
+
+ @Id @GeneratedValue @DocumentId
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ @Field(index= Index.TOKENIZED, store= Store.YES)
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Animal.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Being.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Being.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Being.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,50 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import javax.persistence.MappedSuperclass;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.FieldBridge;
+import org.hibernate.search.test.bridge.PaddedIntegerBridge;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@MappedSuperclass
+public class Being {
+ @Field(index = Index.UN_TOKENIZED)
+ @FieldBridge(impl = PaddedIntegerBridge.class)
+ private int weight;
+
+ public int getWeight() {
+ return weight;
+ }
+
+ public void setWeight(int weight) {
+ this.weight = weight;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Being.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Bird.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Bird.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Bird.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class Bird extends Animal {
+
+ private int numberOfEggs;
+
+ @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ public int getNumberOfEggs() {
+ return numberOfEggs;
+ }
+
+ public void setNumberOfEggs(int numberOfEggs) {
+ this.numberOfEggs = numberOfEggs;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Bird.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Eagle.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Eagle.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Eagle.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,56 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class Eagle extends Bird {
+
+ private WingType wingYype;
+
+ @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ public WingType getWingYype() {
+ return wingYype;
+ }
+
+ public void setWingYype(WingType wingYype) {
+ this.wingYype = wingYype;
+ }
+
+ public enum WingType {
+ BROAD,
+ LONG
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Eagle.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Fish.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Fish.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Fish.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+public class Fish extends Animal {
+
+ private int numberOfDorsalFins;
+
+ @Field(index = Index.UN_TOKENIZED, store = Store.YES)
+ public int getNumberOfDorsalFins() {
+ return numberOfDorsalFins;
+ }
+
+ public void setNumberOfDorsalFins(int numberOfDorsalFins) {
+ this.numberOfDorsalFins = numberOfDorsalFins;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Fish.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/InheritanceTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/InheritanceTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/InheritanceTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,315 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import java.util.List;
+import java.io.Serializable;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermRangeQuery;
+import org.slf4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.util.LoggerFactory;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class InheritanceTest extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ protected void setUp() throws Exception {
+ super.setUp();
+ }
+
+ public void testSearchUnindexClass() throws Exception {
+ createTestData();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse( "Elephant" );
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ try {
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, String.class );
+ hibQuery.list();
+ tx.commit();
+ fail();
+ }
+ catch ( IllegalArgumentException iae ) {
+ log.debug( "success" );
+ }
+
+ tx = s.beginTransaction();
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Mammal.class );
+ assertItsTheElephant( hibQuery.list() );
+ tx.commit();
+
+ s.close();
+ }
+
+ public void testInheritance() throws Exception {
+ createTestData();
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse( "Elephant" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Mammal.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ query = parser.parse( "Elephant" );
+ hibQuery = s.createFullTextQuery( query);
+ assertItsTheElephant( hibQuery.list() );
+
+ query = parser.parse( "hasSweatGlands:false" );
+ hibQuery = s.createFullTextQuery( query, Animal.class, Mammal.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ query = parser.parse( "Elephant OR White Pointer" );
+ hibQuery = s.createFullTextQuery( query, Being.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query filtering on superclass return mapped subclasses", 2, result.size() );
+
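+ // range query on the padded weight field should match only the elephant (weight 4500)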
+ query = new TermRangeQuery( "weight", "04000", "05000", true, true );
+ hibQuery = s.createFullTextQuery( query, Animal.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ query = parser.parse( "Elephant" );
+ hibQuery = s.createFullTextQuery( query, Being.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ tx.commit();
+ s.close();
+ }
+
+
+ public void testPolymorphicQueries() throws Exception {
+ createTestData();
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse( "Elephant" );
+
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Mammal.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ hibQuery = s.createFullTextQuery( query, Animal.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ hibQuery = s.createFullTextQuery( query, Being.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ hibQuery = s.createFullTextQuery( query, Object.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ hibQuery = s.createFullTextQuery( query, Serializable.class );
+ assertItsTheElephant( hibQuery.list() );
+
+ hibQuery = s.createFullTextQuery(
+ query, Mammal.class, Animal.class, Being.class, Object.class, Serializable.class
+ );
+ assertItsTheElephant( hibQuery.list() );
+
+ tx.commit();
+ s.close();
+ }
+
+ public void testSubclassInclusion() throws Exception {
+ createTestData();
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Query query = new TermQuery( new Term( "numberOfEggs", "2" ) );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Eagle.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of hits. There should be two birds.", 1, result.size() );
+
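+ // restricting to Bird also matches the Eagle subclass, so both birds are returned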
+ query = new TermQuery( new Term( "numberOfEggs", "2" ) );
+ hibQuery = s.createFullTextQuery( query, Bird.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of hits. There should be two birds.", 2, result.size() );
+
+ query = new TermQuery( new Term( "numberOfEggs", "2" ) );
+ hibQuery = s.createFullTextQuery( query, Mammal.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of hits. There should be two birds.", 0, result.size() );
+
+ try {
+ query = new TermQuery( new Term( "numberOfEggs", "2" ) );
+ hibQuery = s.createFullTextQuery( query, String.class );
+ hibQuery.list();
+ fail();
+ }
+ catch ( IllegalArgumentException iae ) {
+ log.debug( "success" );
+ }
+
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * Tests that purging the index of a class also purges the index of the subclasses. See also HSEARCH-262.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testPurgeIndex() throws Exception {
+ createTestData();
+ FullTextSession s = Search.getFullTextSession( openSession() );
+
+ Transaction tx = s.beginTransaction();
+ assertNumberOfAnimals( s, 5 );
+ tx.commit();
+
+ tx = s.beginTransaction();
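+ // purging by a supertype also purges its indexed subclasses: Serializable covers the two mammals, leaving 3 animals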
+ s.purgeAll( Serializable.class );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ assertNumberOfAnimals( s, 3 );
+ tx.commit();
+
+ tx = s.beginTransaction();
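+ // purging Bird also purges the Eagle subclass, leaving only the fish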
+ s.purgeAll( Bird.class );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ assertNumberOfAnimals( s, 1 );
+ tx.commit();
+
+ tx = s.beginTransaction();
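+ // purging Object clears the whole index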
+ s.purgeAll( Object.class );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ assertNumberOfAnimals( s, 0 );
+ tx.commit();
+
+ s.close();
+ }
+
+ /**
+ * Tests that purging an unindexed class triggers an exception.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testPurgeUnIndexClass() throws Exception {
+ createTestData();
+ FullTextSession s = Search.getFullTextSession( openSession() );
+
+ Transaction tx = s.beginTransaction();
+ assertNumberOfAnimals( s, 5 );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ try {
+ s.purgeAll( String.class );
+ tx.commit();
+ fail();
+ }
+ catch ( IllegalArgumentException iae ) {
+ log.debug( "Success" );
+ }
+ s.close();
+ }
+
+ private void assertNumberOfAnimals(FullTextSession s, int count) throws Exception {
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse( "Elephant OR White Pointer OR Chimpanzee OR Dove or Eagle" );
+ List result = s.createFullTextQuery( query, Animal.class ).list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of hits. There should be one elephant and one shark.", count, result.size() );
+ }
+
+ private void createTestData() {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ Fish shark = new Fish();
+ shark.setName( "White Pointer" );
+ shark.setNumberOfDorsalFins( 2 );
+ shark.setWeight( 1500 );
+ s.save( shark );
+
+ Mammal elephant = new Mammal();
+ elephant.setName( "Elephant" );
+ elephant.setHasSweatGlands( false );
+ elephant.setWeight( 4500 );
+ s.save( elephant );
+
+ Mammal chimp = new Mammal();
+ chimp.setName( "Chimpanzee" );
+ chimp.setHasSweatGlands( true );
+ chimp.setWeight( 50 );
+ s.save( chimp );
+
+ Bird dove = new Bird();
+ dove.setName( "Dove" );
+ dove.setNumberOfEggs( 2 );
+ s.save( dove );
+
+ Eagle eagle = new Eagle();
+ eagle.setName( "Bald Eagle" );
+ eagle.setNumberOfEggs( 2 );
+ eagle.setWingYype( Eagle.WingType.BROAD );
+ s.save( eagle );
+
+ tx.commit();
+ s.clear();
+ }
+
+ private void assertItsTheElephant(List result) {
+ assertNotNull( result );
+ assertEquals( "Wrong number of results", 1, result.size() );
+ assertTrue( "Wrong result type", result.get( 0 ) instanceof Mammal );
+ Mammal mammal = ( Mammal ) result.get( 0 );
+ assertEquals( "Wrong animal name", "Elephant", mammal.getName() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Animal.class,
+ Mammal.class,
+ Fish.class,
+ Bird.class,
+ Eagle.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/InheritanceTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Mammal.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Mammal.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Mammal.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.inheritance;
+
+import java.io.Serializable;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Mammal extends Animal implements Serializable {
+ private boolean hasSweatGlands;
+
+ @Field(index= Index.UN_TOKENIZED, store= Store.YES)
+ public boolean isHasSweatGlands() {
+ return hasSweatGlands;
+ }
+
+ public void setHasSweatGlands(boolean hasSweatGlands) {
+ this.hasSweatGlands = hasSweatGlands;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/inheritance/Mammal.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/JGroupsCommonTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/JGroupsCommonTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/JGroupsCommonTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,151 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.common;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.HibernateException;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.backend.impl.jgroups.JGroupsBackendQueueProcessorFactory;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.jgroups.master.TShirt;
+
+/**
+ * When running this test outside the Hibernate Search Maven configuration, set the following VM options:
+ * <br><br>
+ * <code>
+ * -Djava.net.preferIPv4Stack=true -Djgroups.bind_addr=127.0.0.1
+ * </code>
+ * @author Lukasz Moren
+ */
+
+public class JGroupsCommonTest extends MultipleSessionsSearchTestCase {
+
+ public static final String CHANNEL_NAME = "jgroups_test_channel";
+ private static final String DEFAULT_JGROUPS_CONFIGURATION_FILE = "flush-udp.xml";
+
+ public void testJGroupsBackend() throws Exception {
+
+ //get slave session
+ Session s = getSlaveSession();
+ Transaction tx = s.beginTransaction();
+ TShirt ts = new TShirt();
+ ts.setLogo( "Boston" );
+ ts.setSize( "XXL" );
+ TShirt ts2 = new TShirt();
+ ts2.setLogo( "Mapple leaves" );
+ ts2.setSize( "L" );
+ s.persist( ts );
+ s.persist( ts2 );
+ tx.commit();
+
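+ //need to sleep for the message consumption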
+ Thread.sleep( 3000 );
+
+ FullTextSession ftSess = Search.getFullTextSession( openSession() );
+ ftSess.getTransaction().begin();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ Query luceneQuery = parser.parse( "logo:Boston or logo:Mapple leaves" );
+ org.hibernate.Query query = ftSess.createFullTextQuery( luceneQuery );
+ List result = query.list();
+
+ assertEquals( 2, result.size() );
+
+ s = getSlaveSession();
+ tx = s.beginTransaction();
+ ts = ( TShirt ) s.get( TShirt.class, ts.getId() );
+ ts.setLogo( "Peter pan" );
+ tx.commit();
+
+ //need to sleep for the message consumption
+ Thread.sleep( 3000 );
+
+ parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ luceneQuery = parser.parse( "logo:Peter pan" );
+ query = ftSess.createFullTextQuery( luceneQuery );
+ result = query.list();
+ assertEquals( 1, result.size() );
+
+ s = getSlaveSession();
+ tx = s.beginTransaction();
+ s.delete( s.get( TShirt.class, ts.getId() ) );
+ s.delete( s.get( TShirt.class, ts2.getId() ) );
+ tx.commit();
+
+ //Need to sleep for the message consumption
+ Thread.sleep( 3000 );
+
+ parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ luceneQuery = parser.parse( "logo:Boston or logo:Mapple leaves" );
+ query = ftSess.createFullTextQuery( luceneQuery );
+ result = query.list();
+ assertEquals( 0, result.size() );
+
+ ftSess.close();
+ s.close();
+
+ }
+
+ @Override
+ protected void configure(Configuration cfg) {
+ //master jgroups configuration
+ super.configure( cfg );
+ cfg.setProperty( Environment.WORKER_BACKEND, "jgroupsMaster" );
+ cfg.setProperty( JGroupsBackendQueueProcessorFactory.CONFIGURATION_FILE, DEFAULT_JGROUPS_CONFIGURATION_FILE );
+ }
+
+ @Override
+ protected void commonConfigure(Configuration cfg) {
+ //slave jgroups configuration
+ super.commonConfigure( cfg );
+ cfg.setProperty( Environment.WORKER_BACKEND, "jgroupsSlave" );
+ cfg.setProperty( JGroupsBackendQueueProcessorFactory.CONFIGURATION_FILE, DEFAULT_JGROUPS_CONFIGURATION_FILE );
+ }
+
+ public static Session getSession() throws HibernateException {
+ return sessions.openSession();
+ }
+
+ @Override
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+
+ protected Class<?>[] getCommonMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/JGroupsCommonTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/MultipleSessionsSearchTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/MultipleSessionsSearchTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/MultipleSessionsSearchTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,174 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.common;
+
+import java.io.InputStream;
+
+import org.slf4j.Logger;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.classic.Session;
+import org.hibernate.dialect.Dialect;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * Test class to simulate a clustered environment (one master and one slave node)
+ *
+ * @author Lukasz Moren
+ */
+public abstract class MultipleSessionsSearchTestCase extends SearchTestCase {
+
+ private static final Logger log = org.hibernate.search.util.LoggerFactory.make();
+
+ private String masterCopy = "/master/copy";
+
+ /**
+ * The lucene index directory which is specific to the master node.
+ */
+ private String masterMain = "/master/main";
+
+ /**
+ * The lucene index directory which is specific to the slave node.
+ */
+ private String slave = "/slave";
+
+
+ protected static SessionFactory slaveSessionFactory;
+
+ /**
+ * Common configuration for all slave nodes
+ */
+ private Configuration commonCfg;
+
+ @Override
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+
+ //master
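+ // FSMasterDirectoryProvider writes to indexBase and copies the index to the shared sourceBase every 'refresh' seconds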
+ cfg.setProperty( "hibernate.search.default.sourceBase", getIndexDir().getAbsolutePath() + masterCopy );
+ cfg.setProperty( "hibernate.search.default.indexBase", getIndexDir().getAbsolutePath() + masterMain );
+ cfg.setProperty( "hibernate.search.default.refresh", "1" );
+ cfg.setProperty(
+ "hibernate.search.default.directory_provider", "org.hibernate.search.store.FSMasterDirectoryProvider"
+ );
+ }
+
+ protected void commonConfigure(Configuration cfg) {
+ super.configure( cfg );
+
+ //slave(s)
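+ // FSSlaveDirectoryProvider pulls the index from the shared sourceBase into the local indexBase every 'refresh' seconds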
+ cfg.setProperty( "hibernate.search.default.sourceBase", getIndexDir().getAbsolutePath() + masterCopy );
+ cfg.setProperty( "hibernate.search.default.indexBase", getIndexDir().getAbsolutePath() + slave );
+ cfg.setProperty( "hibernate.search.default.refresh", "1" );
+ cfg.setProperty(
+ "hibernate.search.default.directory_provider", "org.hibernate.search.store.FSSlaveDirectoryProvider"
+ );
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ if ( getIndexDir().exists() ) {
+ FileHelper.delete( getIndexDir() );
+ }
+ super.setUp();
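+ // super.setUp() builds the master session factory; build the slave one next to it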
+ buildCommonSessionFactory( getCommonMappings(), getCommonAnnotatedPackages(), getCommonXmlFiles() );
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+
+ //close session factories and clean index files
+ if ( slaveSessionFactory != null ) {
+ slaveSessionFactory.close();
+ }
+ if ( getSessions() != null ) {
+ getSessions().close();
+ }
+ log.info( "Deleting test directory {} ", getIndexDir().getAbsolutePath() );
+ FileHelper.delete( getIndexDir() );
+ }
+
+ private void buildCommonSessionFactory(Class<?>[] classes, String[] packages, String[] xmlFiles) throws Exception {
+ try {
+ if ( getSlaveSessionFactory() != null ) {
+ getSlaveSessionFactory().close();
+ }
+
+ setCommonCfg( new AnnotationConfiguration() );
+ commonConfigure( commonCfg );
+ if ( recreateSchema() ) {
+ commonCfg.setProperty( org.hibernate.cfg.Environment.HBM2DDL_AUTO, "create-drop" );
+ }
+ for ( String aPackage : packages ) {
+ ( ( AnnotationConfiguration ) getCommonConfiguration() ).addPackage( aPackage );
+ }
+ for ( Class<?> aClass : classes ) {
+ ( ( AnnotationConfiguration ) getCommonConfiguration() ).addAnnotatedClass( aClass );
+ }
+ for ( String xmlFile : xmlFiles ) {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFile );
+ getCommonConfiguration().addInputStream( is );
+ }
+ setDialect( Dialect.getDialect() );
+ slaveSessionFactory = getCommonConfiguration().buildSessionFactory();
+ }
+ catch ( Exception e ) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+
+ private void setCommonCfg(Configuration configuration) {
+ this.commonCfg = configuration;
+ }
+
+ protected Configuration getCommonConfiguration() {
+ return commonCfg;
+ }
+
+ protected Session getSlaveSession() {
+ return slaveSessionFactory.openSession();
+ }
+
+ protected static SessionFactory getSlaveSessionFactory() {
+ return slaveSessionFactory;
+ }
+
+ private String[] getCommonAnnotatedPackages() {
+ return new String[] { };
+ }
+
+ private String[] getCommonXmlFiles() {
+ return new String[] { };
+ }
+
+ protected abstract Class<?>[] getMappings();
+
+ protected abstract Class<?>[] getCommonMappings();
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/common/MultipleSessionsSearchTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/JGroupsMasterTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/JGroupsMasterTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/JGroupsMasterTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,193 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.master;
+
+import java.io.Serializable;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.jgroups.JChannel;
+import org.jgroups.Message;
+
+import org.hibernate.HibernateException;
+import org.hibernate.Session;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.impl.jgroups.JGroupsBackendQueueProcessorFactory;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.jms.master.TShirt;
+
+/**
+ * Tests that the Master node in a JGroups cluster can properly process messages received from the channel.
+ * <p/>
+ * When running this test outside the Hibernate Search Maven configuration, set the following VM options:
+ * <br><br>
+ * <code>
+ * -Djava.net.preferIPv4Stack=true -Djgroups.bind_addr=127.0.0.1
+ * </code>
+ *
+ * @author Lukasz Moren
+ */
+public class JGroupsMasterTest extends SearchTestCase {
+
+ /**
+ * Name of the JGroups channel used in test
+ */
+ public static final String CHANNEL_NAME = "jgroups_test_channel";
+
+ private JChannel channel;
+
+ public void testMessageSending() throws Exception {
+
+ TShirt shirt = createObjectWithSQL();
+ List<LuceneWork> queue = createDocumentAndWorkQueue( shirt );
+
+ sendMessage( queue );
+
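+ // give the master listener time to receive the message and index the work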
+ Thread.sleep( 3000 );
+
+ FullTextSession ftSess = Search.getFullTextSession( openSession() );
+ ftSess.getTransaction().begin();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ Query luceneQuery = parser.parse( "logo:jboss" );
+ org.hibernate.Query query = ftSess.createFullTextQuery( luceneQuery );
+ List result = query.list();
+ assertEquals( 1, result.size() );
+ ftSess.delete( result.get( 0 ) );
+ ftSess.getTransaction().commit();
+ ftSess.close();
+ }
+
+ private void prepareJGroupsChannel() throws Exception {
+ channel = new JChannel( prepareJGroupsCongigurationString() );
+ channel.connect( CHANNEL_NAME );
+ }
+
+ private void sendMessage(List<LuceneWork> queue) throws Exception {
+ //send message to all listeners
+ Message message = new Message( null, null, ( Serializable ) queue );
+ channel.send( message );
+ }
+
+ /**
+ * Manually create the work queue. This list gets sent by the Slaves to the Master for indexing.
+ *
+ * @param shirt The shirt to index
+ *
+ * @return A manually created <code>LuceneWork</code> list.
+ */
+ private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
+ Document doc = new Document();
+ Field field = new Field(
+ DocumentBuilder.CLASS_FIELDNAME, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED
+ );
+ doc.add( field );
+ field = new Field( "id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED );
+ doc.add( field );
+ field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
+ doc.add( field );
+ LuceneWork luceneWork = new AddLuceneWork(
+ shirt.getId(), String.valueOf( shirt.getId() ), shirt.getClass(), doc
+ );
+ List<LuceneWork> queue = new ArrayList<LuceneWork>();
+ queue.add( luceneWork );
+ return queue;
+ }
+
+ /**
+ * Create a test object without triggering indexing. Use SQL directly.
+ *
+ * @return a <code>TShirt</code> test object.
+ *
+ * @throws java.sql.SQLException in case the insert fails.
+ */
+ @SuppressWarnings({ "deprecation" })
+ private TShirt createObjectWithSQL() throws SQLException {
+ Session s = openSession();
+ s.getTransaction().begin();
+ Statement statement = s.connection().createStatement();
+ statement.executeUpdate(
+ "insert into TShirt_Master(id, logo, size_) values( '1', 'JBoss balls', 'large')"
+ );
+ statement.close();
+ TShirt ts = ( TShirt ) s.get( TShirt.class, 1 );
+ s.getTransaction().commit();
+ s.close();
+ return ts;
+ }
+
+ public static Session getSession() throws HibernateException {
+ return sessions.openSession();
+ }
+
+ protected void setUp() throws Exception {
+ prepareJGroupsChannel();
+ super.setUp();
+ }
+
+ protected void tearDown() throws Exception {
+ channel.close();
+ super.tearDown();
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ // JGroups configuration for master node
+ cfg.setProperty( Environment.WORKER_BACKEND, "jgroupsMaster" );
+ cfg.setProperty( JGroupsBackendQueueProcessorFactory.JG_CLUSTER_NAME, CHANNEL_NAME );
+ cfg.setProperty(
+ JGroupsBackendQueueProcessorFactory.CONFIGURATION_STRING, prepareJGroupsCongigurationString()
+ );
+ }
+
+ private String prepareJGroupsCongigurationString() {
+ return "UDP(mcast_addr=228.1.2.3;mcast_port=45566;ip_ttl=32):" +
+ "PING(timeout=3000;num_initial_members=6):" +
+ "FD(timeout=5000):" +
+ "VERIFY_SUSPECT(timeout=1500):" +
+ "pbcast.NAKACK(gc_lag=10;retransmit_timeout=3000):" +
+ "UNICAST(timeout=5000):" +
+ "FRAG:" +
+ "pbcast.GMS(join_timeout=3000;" +
+ "shun=false;print_local_addr=true)";
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/JGroupsMasterTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/TShirt.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/TShirt.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/TShirt.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.master;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class TShirt {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String logo;
+ private String size;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getLogo() {
+ return logo;
+ }
+
+ public void setLogo(String logo) {
+ this.logo = logo;
+ }
+
+ public String getSize() {
+ return size;
+ }
+
+ public void setSize(String size) {
+ this.size = size;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/master/TShirt.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsReceiver.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsReceiver.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsReceiver.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.slave;
+
+import java.util.List;
+
+import org.jgroups.Message;
+import org.jgroups.ReceiverAdapter;
+
+import org.hibernate.search.backend.LuceneWork;
+
+/**
+ * @author Lukasz Moren
+ */
+
+public class JGroupsReceiver extends ReceiverAdapter {
+
+ public static int queues;
+ public static int works;
+
+ public static void reset() {
+ queues = 0;
+ works = 0;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public void receive(Message message) {
+
+ List<LuceneWork> queue;
+ try {
+ queue = ( List<LuceneWork> ) message.getObject();
+ }
+
+ catch ( ClassCastException e ) {
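+ // not a LuceneWork queue; ignore the message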
+ return;
+ }
+ queues++;
+ works += queue.size();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsReceiver.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsSlaveTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsSlaveTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsSlaveTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,188 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.slave;
+
+import org.jgroups.Channel;
+import org.jgroups.JChannel;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.backend.impl.jgroups.JGroupsBackendQueueProcessorFactory;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.XMLHelper;
+
+/**
+ * Tests that the Slave node in a JGroups cluster can properly send messages to the channel.
+ * <p/>
+ * When running this test outside the Hibernate Search Maven configuration, set the following VM options:
+ * <br><br>
+ * <code>
+ * -Djava.net.preferIPv4Stack=true -Djgroups.bind_addr=127.0.0.1
+ * </code>
+ *
+ * @author Lukasz Moren
+ */
+
+public class JGroupsSlaveTest extends SearchTestCase {
+
+ public static final String CHANNEL_NAME = "HSearchCluster";
+
+ private Channel channel;
+
+ public void testMessageSend() throws Exception {
+
+ JGroupsReceiver.reset();
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ TShirt ts = new TShirt();
+ ts.setLogo( "Boston" );
+ ts.setSize( "XXL" );
+ TShirt ts2 = new TShirt();
+ ts2.setLogo( "Mapple leaves" );
+ ts2.setSize( "L" );
+ s.persist( ts );
+ s.persist( ts2 );
+ tx.commit();
+
+ //need to sleep for the message consumption
+ Thread.sleep( 500 );
+
+ assertEquals( 1, JGroupsReceiver.queues );
+ assertEquals( 2, JGroupsReceiver.works );
+
+ JGroupsReceiver.reset();
+ s = openSession();
+ tx = s.beginTransaction();
+ ts = ( TShirt ) s.get( TShirt.class, ts.getId() );
+ ts.setLogo( "Peter pan" );
+ tx.commit();
+
+ //need to sleep for the message consumption
+ Thread.sleep( 500 );
+
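+ // an entity update is sent as a delete + add pair, hence one queue message carrying two works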
+ assertEquals( 1, JGroupsReceiver.queues );
+ assertEquals( 2, JGroupsReceiver.works );
+
+ JGroupsReceiver.reset();
+ s = openSession();
+ tx = s.beginTransaction();
+ s.delete( s.get( TShirt.class, ts.getId() ) );
+ tx.commit();
+
+ //Need to sleep for the message consumption
+ Thread.sleep( 500 );
+
+ assertEquals( 1, JGroupsReceiver.queues );
+ assertEquals( 1, JGroupsReceiver.works );
+ s.close();
+ }
+
+ private void prepareJGroupsChannel() throws Exception {
+ channel = new JChannel( XMLHelper.elementFromString( prepareXmlJGroupsConfiguration() ) );
+ channel.connect( CHANNEL_NAME );
+ channel.setReceiver( new JGroupsReceiver() );
+ }
+
+ protected void setUp() throws Exception {
+ super.setUp();
+ prepareJGroupsChannel();
+ }
+
+ protected void tearDown() throws Exception {
+ channel.close();
+ super.tearDown();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.WORKER_BACKEND, "jgroupsSlave" );
+ cfg.setProperty( JGroupsBackendQueueProcessorFactory.CONFIGURATION_XML, prepareXmlJGroupsConfiguration() );
+ }
+
+ private String prepareXmlJGroupsConfiguration() {
+ return "<config>" +
+ "<UDP" +
+ " mcast_addr=\"${jgroups.udp.mcast_addr:228.10.10.10}\"" +
+ " mcast_port=\"${jgroups.udp.mcast_port:45588}\"" +
+ " tos=\"8\"" +
+ " ucast_recv_buf_size=\"20000000\"" +
+ " ucast_send_buf_size=\"640000\"" +
+ " mcast_recv_buf_size=\"25000000\"" +
+ " mcast_send_buf_size=\"640000\"" +
+ " loopback=\"false\"\n" +
+ " discard_incompatible_packets=\"true\"" +
+ " max_bundle_size=\"64000\"" +
+ " max_bundle_timeout=\"30\"" +
+ " use_incoming_packet_handler=\"true\"" +
+ " ip_ttl=\"${jgroups.udp.ip_ttl:2}\"" +
+ " enable_bundling=\"true\"" +
+ " enable_diagnostics=\"true\"" +
+ " use_concurrent_stack=\"true\"" +
+ " thread_naming_pattern=\"pl\"" +
+ " thread_pool.enabled=\"true\"" +
+ " thread_pool.min_threads=\"1\"" +
+ " thread_pool.max_threads=\"25\"" +
+ " thread_pool.keep_alive_time=\"5000\"" +
+ " thread_pool.queue_enabled=\"false\"" +
+ " thread_pool.queue_max_size=\"100\"" +
+ " thread_pool.rejection_policy=\"Run\"" +
+ " oob_thread_pool.enabled=\"true\"" +
+ " oob_thread_pool.min_threads=\"1\"" +
+ " oob_thread_pool.max_threads=\"8\"" +
+ " oob_thread_pool.keep_alive_time=\"5000\"" +
+ " oob_thread_pool.queue_enabled=\"false\"" +
+ " oob_thread_pool.queue_max_size=\"100\"" +
+ " oob_thread_pool.rejection_policy=\"Run\"/>" +
+ "<PING timeout=\"2000\" num_initial_members=\"3\"/>" +
+ "<MERGE2 max_interval=\"30000\" min_interval=\"10000\"/>" +
+ "<FD_SOCK/>" +
+ "<FD timeout=\"10000\" max_tries=\"5\" shun=\"true\"/>" +
+ "<VERIFY_SUSPECT timeout=\"1500\"/>" +
+ "<pbcast.NAKACK " +
+ " use_mcast_xmit=\"false\" gc_lag=\"0\"" +
+ " retransmit_timeout=\"300,600,1200,2400,4800\"" +
+ " discard_delivered_msgs=\"false\"/>" +
+ "<UNICAST timeout=\"300,600,1200,2400,3600\"/>" +
+ "<pbcast.STABLE stability_delay=\"1000\" desired_avg_gossip=\"50000\"" +
+ " max_bytes=\"400000\"/> " +
+ "<pbcast.GMS print_local_addr=\"true\" join_timeout=\"3000\"" +
+ " shun=\"false\"" +
+ " view_bundling=\"true\"/>" +
+ "<FC max_credits=\"20000000\" min_threshold=\"0.10\"/>" +
+ "<FRAG2 frag_size=\"60000\"/>" +
+ "<pbcast.STREAMING_STATE_TRANSFER />" +
+ "<pbcast.FLUSH timeout=\"0\"/>" +
+ "</config>";
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/JGroupsSlaveTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/TShirt.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/TShirt.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/TShirt.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jgroups.slave;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class TShirt {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String logo;
+ private String size;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getLogo() {
+ return logo;
+ }
+
+ public void setLogo(String logo) {
+ this.logo = logo;
+ }
+
+ public String getSize() {
+ return size;
+ }
+
+ public void setSize(String size) {
+ this.size = size;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jgroups/slave/TShirt.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/JMSMasterTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/JMSMasterTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/JMSMasterTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,206 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.master;
+
+import java.io.Serializable;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+import javax.jms.MessageConsumer;
+import javax.jms.ObjectMessage;
+import javax.jms.Queue;
+import javax.jms.QueueConnection;
+import javax.jms.QueueConnectionFactory;
+import javax.jms.QueueSender;
+import javax.jms.QueueSession;
+import javax.naming.Context;
+
+import org.apache.activemq.broker.BrokerService;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.engine.DocumentBuilder;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Tests that the Master node in a JMS cluster can properly process messages placed onto the queue.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class JMSMasterTest extends SearchTestCase {
+
+ /**
+ * Name of the test queue as found in JNDI (see jndi.properties).
+ */
+ private static final String QUEUE_NAME = "queue/searchtest";
+
+ /**
+ * Name of the connection factory as found in JNDI (see jndi.properties).
+ */
+ private static final String CONNECTION_FACTORY_NAME = "java:/ConnectionFactory";
+
+ /**
+ * ActiveMQ message broker.
+ */
+ private BrokerService brokerService;
+
+ private QueueSession queueSession;
+
+ public void testMessageSending() throws Exception {
+
+ TShirt shirt = createObjectWithSQL();
+ List<LuceneWork> queue = createDocumentAndWorkQueue( shirt );
+
+ registerMessageListener();
+ sendMessage( queue );
+
+ // need to sleep to give JMS processing and indexing time
+ Thread.sleep( 1000 );
+
+ FullTextSession ftSess = Search.getFullTextSession( openSession() );
+ ftSess.getTransaction().begin();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ Query luceneQuery = parser.parse( "logo:jboss" );
+ org.hibernate.Query query = ftSess.createFullTextQuery( luceneQuery );
+ List result = query.list();
+ assertEquals( 1, result.size() );
+ ftSess.delete( result.get( 0 ) );
+ ftSess.getTransaction().commit();
+ ftSess.close();
+ }
+
+ private void registerMessageListener() throws Exception {
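+ // stand-in for the search MDB on the master: the controller indexes the work queues it receives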
+ MessageConsumer consumer = getQueueSession().createConsumer( getMessageQueue() );
+ consumer.setMessageListener( new MDBSearchController( getSessions() ) );
+ }
+
+ private void sendMessage(List<LuceneWork> queue) throws Exception {
+ ObjectMessage message = getQueueSession().createObjectMessage();
+ message.setObject( ( Serializable ) queue );
+ QueueSender sender = getQueueSession().createSender( getMessageQueue() );
+ sender.send( message );
+ }
+
+ private Queue getMessageQueue() throws Exception {
+ Context ctx = new javax.naming.InitialContext();
+ return ( Queue ) ctx.lookup( QUEUE_NAME );
+ }
+
+ private QueueSession getQueueSession() throws Exception {
+ if ( queueSession == null ) {
+ Context ctx = new javax.naming.InitialContext();
+ QueueConnectionFactory factory = ( QueueConnectionFactory ) ctx.lookup( CONNECTION_FACTORY_NAME );
+ QueueConnection conn = factory.createQueueConnection();
+ conn.start();
+ queueSession = conn.createQueueSession( false, QueueSession.AUTO_ACKNOWLEDGE );
+
+ }
+ return queueSession;
+ }
+
+ /**
+ * Manually create the work queue. This list gets sent by the Slaves to the Master for indexing.
+ *
+ * @param shirt The shirt to index
+ *
+ * @return A manually created <code>LuceneWork</code> list.
+ */
+ private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
+ Document doc = new Document();
+ Field field = new Field(
+ DocumentBuilder.CLASS_FIELDNAME, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED
+ );
+ doc.add( field );
+ field = new Field( "id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED );
+ doc.add( field );
+ field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
+ doc.add( field );
+ LuceneWork luceneWork = new AddLuceneWork(
+ shirt.getId(), String.valueOf( shirt.getId() ), shirt.getClass(), doc
+ );
+ List<LuceneWork> queue = new ArrayList<LuceneWork>();
+ queue.add( luceneWork );
+ return queue;
+ }
+
+ /**
+ * Create a test object without triggering indexing. Use SQL directly.
+ *
+ * @return a <code>TShirt</code> test object.
+ *
+ * @throws SQLException in case the insert fails.
+ */
+ private TShirt createObjectWithSQL() throws SQLException {
+ Session s = openSession();
+ s.getTransaction().begin();
+ Statement statement = s.connection().createStatement();
+ statement.executeUpdate(
+ "insert into TShirt_Master(id, logo, size_) values( 1, 'JBoss balls', 'large')"
+ );
+ statement.close();
+ TShirt ts = ( TShirt ) s.get( TShirt.class, 1 );
+ s.getTransaction().commit();
+ s.close();
+ return ts;
+ }
+
+ protected void setUp() throws Exception {
+ // create and start the brokerService
+ brokerService = new BrokerService();
+ brokerService.setPersistent( false );
+ brokerService.start();
+
+ super.setUp();
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ if ( brokerService != null ) {
+ brokerService.stop();
+ }
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ // explicitly set the backend even though lucene is the default.
+ cfg.setProperty( Environment.WORKER_BACKEND, "lucene" );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/JMSMasterTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/MDBSearchController.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/MDBSearchController.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/MDBSearchController.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,49 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.master;
+
+import org.hibernate.search.backend.impl.jms.AbstractJMSHibernateSearchController;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MDBSearchController extends AbstractJMSHibernateSearchController {
+
+ SessionFactory sessionFactory;
+
+ MDBSearchController( SessionFactory sessionFactory ) {
+ this.sessionFactory = sessionFactory;
+ }
+
+ protected Session getSession() {
+ return sessionFactory.openSession( );
+ }
+
+ protected void cleanSessionIfNeeded(Session session) {
+ session.close();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/MDBSearchController.java
___________________________________________________________________
Name: svn:keywords
+ Id
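For context (not part of this commit): on the master node a controller like the one above is usually deployed as a message-driven bean listening on the shared queue, rather than instantiated directly as this test harness does. A minimal sketch, assuming a container-managed persistence context and a hypothetical queue name:

import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.jms.MessageListener;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

import org.hibernate.Session;
import org.hibernate.search.backend.impl.jms.AbstractJMSHibernateSearchController;

@MessageDriven(activationConfig = {
    @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue"),
    @ActivationConfigProperty(propertyName = "destination", propertyValue = "queue/hibernatesearch")
})
public class SearchControllerMDB extends AbstractJMSHibernateSearchController implements MessageListener {

    @PersistenceContext EntityManager em;

    // reuse the container-managed persistence context to apply the incoming index work
    protected Session getSession() {
        return (Session) em.getDelegate();
    }

    // the session belongs to the container, so there is nothing to close here
    protected void cleanSessionIfNeeded(Session session) {
    }
}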
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/TShirt.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/TShirt.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/TShirt.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,76 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.master;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.Table;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+@Table(name="TShirt_Master")
+public class TShirt {
+ @Id
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String logo;
+ @Column(name="size_")
+ private String size;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getLogo() {
+ return logo;
+ }
+
+ public void setLogo(String logo) {
+ this.logo = logo;
+ }
+
+ public String getSize() {
+ return size;
+ }
+
+ public void setSize(String size) {
+ this.size = size;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/master/TShirt.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/JMSSlaveTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/JMSSlaveTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/JMSSlaveTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,168 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.slave;
+
+import javax.jms.MessageConsumer;
+import javax.jms.Queue;
+import javax.jms.QueueConnection;
+import javax.jms.QueueConnectionFactory;
+import javax.jms.QueueSession;
+import javax.naming.Context;
+
+import org.apache.activemq.broker.BrokerService;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.backend.impl.jms.JMSBackendQueueProcessorFactory;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+
+/**
+ * Checks that the Slave in a JMS configuration properly places index jobs onto the queue.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class JMSSlaveTest extends SearchTestCase {
+
+ /**
+ * Name of the test queue as found in JNDI (see jndi.properties).
+ */
+ private static final String QUEUE_NAME = "queue/searchtest";
+
+ /**
+ * Name of the connection factory as found in JNDI (see jndi.properties).
+ */
+ private static final String CONNECTION_FACTORY_NAME = "java:/ConnectionFactory";
+
+ /**
+ * ActiveMQ message broker.
+ */
+ private BrokerService brokerService;
+
+ private QueueSession queueSession;
+
+ public void testMessageSend() throws Exception {
+ registerMessageListener();
+ SearchQueueChecker.reset();
+
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ TShirt ts = new TShirt();
+ ts.setLogo( "Boston" );
+ ts.setSize( "XXL" );
+ TShirt ts2 = new TShirt();
+ ts2.setLogo( "Mapple leaves" );
+ ts2.setSize( "L" );
+ s.persist( ts );
+ s.persist( ts2 );
+ tx.commit();
+
+ //need to sleep for the message consumption
+ Thread.sleep(500);
+
+ assertEquals( 1, SearchQueueChecker.queues );
+ assertEquals( 2, SearchQueueChecker.works );
+
+ SearchQueueChecker.reset();
+ s = openSession();
+ tx = s.beginTransaction();
+ ts = (TShirt) s.get( TShirt.class, ts.getId() );
+ ts.setLogo( "Peter pan" );
+ tx.commit();
+
+ //need to sleep for the message consumption
+ Thread.sleep(500);
+
+ assertEquals( 1, SearchQueueChecker.queues );
+ assertEquals( 2, SearchQueueChecker.works ); //one update = 2 works
+
+ SearchQueueChecker.reset();
+ s = openSession();
+ tx = s.beginTransaction();
+ s.delete( s.get( TShirt.class, ts.getId() ) );
+ s.delete( s.get( TShirt.class, ts2.getId() ) );
+ tx.commit();
+
+ //Need to sleep for the message consumption
+ Thread.sleep(500);
+
+ assertEquals( 1, SearchQueueChecker.queues );
+ assertEquals( 2, SearchQueueChecker.works );
+ s.close();
+ }
+
+ protected void setUp() throws Exception {
+ // create and start the brokerService
+ brokerService = new BrokerService();
+ brokerService.setPersistent( false );
+ brokerService.start();
+
+ super.setUp();
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ if ( brokerService != null ) {
+ brokerService.stop();
+ }
+ }
+
+ private void registerMessageListener() throws Exception {
+ MessageConsumer consumer = getQueueSession().createConsumer( getMessageQueue() );
+ consumer.setMessageListener( new SearchQueueChecker() );
+ }
+
+ private Queue getMessageQueue() throws Exception {
+ Context ctx = new javax.naming.InitialContext();
+ return ( Queue ) ctx.lookup( QUEUE_NAME );
+ }
+
+ private QueueSession getQueueSession() throws Exception {
+ if ( queueSession == null ) {
+ Context ctx = new javax.naming.InitialContext();
+ QueueConnectionFactory factory = ( QueueConnectionFactory ) ctx.lookup( CONNECTION_FACTORY_NAME );
+ QueueConnection conn = factory.createQueueConnection();
+ conn.start();
+ queueSession = conn.createQueueSession( false, QueueSession.AUTO_ACKNOWLEDGE );
+
+ }
+ return queueSession;
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.WORKER_BACKEND, "jms" );
+ cfg.setProperty( JMSBackendQueueProcessorFactory.JMS_CONNECTION_FACTORY, CONNECTION_FACTORY_NAME );
+ cfg.setProperty( JMSBackendQueueProcessorFactory.JMS_QUEUE, QUEUE_NAME );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ TShirt.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/JMSSlaveTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
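For reference (not part of this commit): the slave-side settings that configure() applies through constants correspond to plain configuration properties. A minimal sketch in Java, assuming the usual literal keys behind Environment.WORKER_BACKEND and the JMSBackendQueueProcessorFactory constants; the JNDI names are the ones used by the test above:

import org.hibernate.cfg.Configuration;

public class SlaveConfigSketch {
    public static Configuration configureJmsSlave(Configuration cfg) {
        // send index work to the JMS queue instead of applying it locally
        cfg.setProperty( "hibernate.search.worker.backend", "jms" );
        // JNDI names of the connection factory and queue shared with the master
        cfg.setProperty( "hibernate.search.worker.jms.connection_factory", "java:/ConnectionFactory" );
        cfg.setProperty( "hibernate.search.worker.jms.queue", "queue/searchtest" );
        return cfg;
    }
}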
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/SearchQueueChecker.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/SearchQueueChecker.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/SearchQueueChecker.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,71 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.slave;
+
+import java.util.List;
+import javax.jms.MessageListener;
+import javax.jms.Message;
+import javax.jms.ObjectMessage;
+import javax.jms.JMSException;
+
+
+import org.hibernate.search.backend.LuceneWork;
+
+/**
+ * Helper class to verify that the Slave places messages onto the queue.
+ *
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class SearchQueueChecker implements MessageListener {
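+ //counters are updated by the JMS delivery thread and read by the test thread;
+ //JMSSlaveTest calls reset() before each batch of assertions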
+ public static int queues;
+ public static int works;
+
+ public static void reset() {
+ queues = 0;
+ works = 0;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void onMessage(Message message) {
+ if ( !( message instanceof ObjectMessage ) ) {
+ return;
+ }
+ ObjectMessage objectMessage = ( ObjectMessage ) message;
+
+ List<LuceneWork> queue;
+ try {
+ queue = ( List<LuceneWork> ) objectMessage.getObject();
+ }
+ catch ( JMSException e ) {
+ return;
+ }
+ catch ( ClassCastException e ) {
+ return;
+ }
+ queues++;
+ works += queue.size();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/SearchQueueChecker.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/TShirt.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/TShirt.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/TShirt.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jms.slave;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class TShirt {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+ @Field(index= Index.TOKENIZED)
+ private String logo;
+ @Column(name="size_")
+ private String size;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getLogo() {
+ return logo;
+ }
+
+ public void setLogo(String logo) {
+ this.logo = logo;
+ }
+
+ public String getSize() {
+ return size;
+ }
+
+ public void setSize(String size) {
+ this.size = size;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jms/slave/TShirt.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/Bretzel.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/Bretzel.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/Bretzel.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,86 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jpa;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Bretzel {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.UN_TOKENIZED)
+ private float saltQty;
+
+ @Field(index = Index.UN_TOKENIZED)
+ private float weight;
+
+
+ public Bretzel() {
+ }
+
+ public Bretzel(float saltQty, float weight) {
+ this.saltQty = saltQty;
+ this.weight = weight;
+ }
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public float getSaltQty() {
+ return saltQty;
+ }
+
+ public void setSaltQty(float saltQty) {
+ this.saltQty = saltQty;
+ }
+
+ public float getWeight() {
+ return weight;
+ }
+
+ public void setWeight(float weight) {
+ this.weight = weight;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/Bretzel.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerSerializationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerSerializationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerSerializationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,140 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jpa;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.search.jpa.FullTextEntityManager;
+import org.hibernate.search.jpa.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Serialization test for entity manager. HSEARCH-117.
+ *
+ * @author Hardy Ferentschik
+ */
+public class EntityManagerSerializationTest extends JPATestCase {
+
+ /**
+ * Test that an entity manager can successfully be serialized and
+ * deserialized.
+ *
+ * @throws Exception
+ * in case the test fails.
+ */
+ public void testSerialization() throws Exception {
+ FullTextEntityManager em = Search.getFullTextEntityManager(factory
+ .createEntityManager());
+
+ indexSearchAssert(em);
+
+ File tmpFile = File.createTempFile("entityManager", "ser", null);
+ serializeEM(em, tmpFile);
+ em = deserializeEM(tmpFile);
+
+ indexSearchAssert(em);
+
+ em.close();
+
+ // cleanup
+ tmpFile.delete();
+ }
+
+ private FullTextEntityManager deserializeEM(File tmpFile) throws ClassNotFoundException {
+ FullTextEntityManager em = null;
+ FileInputStream fis = null;
+ ObjectInputStream in = null;
+ try {
+ fis = new FileInputStream(tmpFile);
+ in = new ObjectInputStream(fis);
+ em = (FullTextEntityManager) in.readObject();
+ in.close();
+ }
+ catch (IOException ex) {
+ ex.printStackTrace();
+ fail();
+ }
+ return em;
+ }
+
+ private void serializeEM(FullTextEntityManager em, File tmpFile) {
+ FileOutputStream fos = null;
+ ObjectOutputStream out = null;
+ try {
+ fos = new FileOutputStream(tmpFile);
+ out = new ObjectOutputStream(fos);
+ out.writeObject(em);
+ out.close();
+ } catch (IOException ex) {
+ ex.printStackTrace();
+ fail();
+ }
+ }
+
+ public Class[] getAnnotatedClasses() {
+ return new Class[] { Bretzel.class };
+ }
+
+ /**
+ * Helper method for testing the entity manager before and after
+ * serialization.
+ *
+ * @param em
+ * @throws Exception
+ */
+ private void indexSearchAssert(FullTextEntityManager em) throws Exception {
+ em.getTransaction().begin();
+ Bretzel bretzel = new Bretzel(23, 34);
+ em.persist(bretzel);
+ em.getTransaction().commit();
+ em.clear();
+ em.getTransaction().begin();
+ QueryParser parser = new QueryParser( SearchTestCase.getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse("saltQty:noword");
+ assertEquals(0, em.createFullTextQuery(query).getResultList().size());
+ query = new TermQuery(new Term("saltQty", "23.0"));
+ assertEquals("getResultList", 1, em.createFullTextQuery(query)
+ .getResultList().size());
+ assertEquals("getSingleResult and object retrieval", 23f, ((Bretzel) em
+ .createFullTextQuery(query).getSingleResult()).getSaltQty());
+ assertEquals(1, em.createFullTextQuery(query).getResultSize());
+ em.getTransaction().commit();
+
+ em.clear();
+
+ em.getTransaction().begin();
+ em.remove(em.find(Bretzel.class, bretzel.getId()));
+ em.getTransaction().commit();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerSerializationTest.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jpa;
+
+import org.hibernate.search.jpa.Search;
+import org.hibernate.search.jpa.FullTextEntityManager;
+import org.hibernate.search.test.SearchTestCase;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class EntityManagerTest extends JPATestCase {
+
+ public void testQuery() throws Exception {
+ FullTextEntityManager em = Search.getFullTextEntityManager( factory.createEntityManager() );
+ em.getTransaction().begin();
+ Bretzel bretzel = new Bretzel( 23, 34 );
+ em.persist( bretzel );
+ em.getTransaction().commit();
+ em.clear();
+ em.getTransaction().begin();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+ Query query = parser.parse( "saltQty:noword" );
+ assertEquals( 0, em.createFullTextQuery( query ).getResultList().size() );
+ query = new TermQuery( new Term("saltQty", "23.0") );
+ assertEquals( "getResultList", 1, em.createFullTextQuery( query ).getResultList().size() );
+ assertEquals( "getSingleResult and object retrieval", 23f,
+ ( (Bretzel) em.createFullTextQuery( query ).getSingleResult() ).getSaltQty() );
+ assertEquals( 1, em.createFullTextQuery( query ).getResultSize() );
+ em.getTransaction().commit();
+
+ em.clear();
+
+ em.getTransaction().begin();
+ em.remove( em.find( Bretzel.class, bretzel.getId() ) );
+ em.getTransaction().commit();
+ em.close();
+ }
+
+ public void testIndex() {
+ FullTextEntityManager em = Search.getFullTextEntityManager( factory.createEntityManager() );
+ em.getTransaction().begin();
+ Bretzel bretzel = new Bretzel( 23, 34 );
+ em.persist( bretzel );
+ em.getTransaction().commit();
+ em.clear();
+
+ //Not really a unit test but a test that shows the method call without failing
+ //FIXME port the index test
+ em.getTransaction().begin();
+ em.index( em.find( Bretzel.class, bretzel.getId() ) );
+ em.getTransaction().commit();
+
+ em.getTransaction().begin();
+ em.remove( em.find( Bretzel.class, bretzel.getId() ) );
+ em.getTransaction().commit();
+ em.close();
+ }
+
+ public Class[] getAnnotatedClasses() {
+ return new Class[] {
+ Bretzel.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/EntityManagerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/JPATestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/JPATestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/JPATestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,138 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.jpa;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.io.InputStream;
+import java.io.IOException;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+
+import org.hibernate.cfg.Environment;
+import org.hibernate.ejb.AvailableSettings;
+import org.hibernate.ejb.HibernatePersistence;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.util.Version;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class JPATestCase extends junit.framework.TestCase {
+ protected EntityManagerFactory factory;
+
+ public JPATestCase() {
+ super();
+ }
+
+ public JPATestCase(String name) {
+ super( name );
+ }
+
+ public void setUp() {
+ factory = new HibernatePersistence().createEntityManagerFactory( getConfig() );
+ }
+
+ public void tearDown() {
+ factory.close();
+ }
+
+ public abstract Class[] getAnnotatedClasses();
+
+ public String[] getEjb3DD() {
+ return new String[]{};
+ }
+
+ public Map<Class, String> getCachedClasses() {
+ return new HashMap<Class, String>();
+ }
+
+ public Map<String, String> getCachedCollections() {
+ return new HashMap<String, String>();
+ }
+
+ public static Properties loadProperties() {
+ Properties props = new Properties();
+ InputStream stream = Persistence.class.getResourceAsStream( "/hibernate.properties" );
+ if ( stream != null ) {
+ try {
+ props.load( stream );
+ }
+ catch (Exception e) {
+ throw new RuntimeException( "could not load hibernate.properties" );
+ }
+ finally {
+ try {
+ stream.close();
+ }
+ catch (IOException ioe) {
+ }
+ }
+ }
+ props.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
+ return props;
+ }
+
+ public Map getConfig() {
+ Map config = loadProperties();
+ ArrayList<Class> classes = new ArrayList<Class>();
+
+ classes.addAll( Arrays.asList( getAnnotatedClasses() ) );
+ config.put( AvailableSettings.LOADED_CLASSES, classes );
+ for ( Map.Entry<Class, String> entry : getCachedClasses().entrySet() ) {
+ config.put(
+ AvailableSettings.CLASS_CACHE_PREFIX + "." + entry.getKey().getName(),
+ entry.getValue()
+ );
+ }
+ for ( Map.Entry<String, String> entry : getCachedCollections().entrySet() ) {
+ config.put(
+ AvailableSettings.COLLECTION_CACHE_PREFIX + "." + entry.getKey(),
+ entry.getValue()
+ );
+ }
+ if ( getEjb3DD().length > 0 ) {
+ ArrayList<String> dds = new ArrayList<String>();
+ dds.addAll( Arrays.asList( getEjb3DD() ) );
+ config.put( AvailableSettings.XML_FILE_NAMES, dds );
+ }
+
+ //Search config
+ config.put( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ config.put( org.hibernate.search.Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+
+ return config;
+ }
+
+ public static Version getTargetLuceneVersion() {
+ return SearchTestCase.getTargetLuceneVersion();
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/jpa/JPATestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Construction.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Construction.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Construction.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.optimizer;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Construction {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.TOKENIZED)
+ private String address;
+
+
+ public Construction() {
+ }
+
+ public Construction(String name, String address) {
+ this.name = name;
+ this.address = address;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getAddress() {
+ return address;
+ }
+
+ public void setAddress(String address) {
+ this.address = address;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Construction.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/IncrementalOptimizerStrategyPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/IncrementalOptimizerStrategyPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/IncrementalOptimizerStrategyPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.optimizer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//having the class name end in *PerfTest excludes it from the functional test runs
+public class IncrementalOptimizerStrategyPerfTest extends OptimizerPerfTest {
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.optimizer.transaction_limit.max", "10" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/IncrementalOptimizerStrategyPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/OptimizerPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/OptimizerPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/OptimizerPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,210 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.optimizer;
+
+import java.io.File;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+//having the class name end in *PerfTest excludes it from the functional test runs
+public class OptimizerPerfTest extends SearchTestCase {
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for (File file : files) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ public void testConcurrency() throws Exception {
+ int nThreads = 15;
+ ExecutorService es = Executors.newFixedThreadPool( nThreads );
+ Work work = new Work( getSessions() );
+ ReverseWork reverseWork = new ReverseWork( getSessions() );
+ long start = System.currentTimeMillis();
+ int iteration = 100;
+ for (int i = 0; i < iteration; i++) {
+ es.execute( work );
+ es.execute( reverseWork );
+ }
+ while ( work.count < iteration - 1 ) {
+ Thread.sleep( 20 );
+ }
+ System.out.println( iteration + " iterations (8 tx per iteration) in " + nThreads + " threads: " + ( System
+ .currentTimeMillis() - start ) );
+ }
+
+ protected static class Work implements Runnable {
+ private final SessionFactory sf;
+ public volatile int count = 0;
+
+ public Work(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ try {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ Worker w = new Worker( "Emmanuel", 65 );
+ s.persist( w );
+ Construction c = new Construction( "Bellagio", "Las Vagas Nevada" );
+ s.persist( c );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ w = (Worker) s.get( Worker.class, w.getId() );
+ w.setName( "Gavin" );
+ c = (Construction) s.get( Construction.class, c.getId() );
+ c.setName( "W Hotel" );
+ tx.commit();
+ s.close();
+
+ try {
+ Thread.sleep( 50 );
+ }
+ catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ FullTextSession fts = new FullTextSessionImpl( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ Query query;
+ try {
+ query = parser.parse( "name:Gavin" );
+ }
+ catch (ParseException e) {
+ throw new RuntimeException( e );
+ }
+ boolean results = fts.createFullTextQuery( query ).list().size() > 0;
+ //don't assert on the result because, with an async backend, the query may run before the actual saving
+ //if ( !results ) throw new RuntimeException( "No results!" );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ w = (Worker) s.get( Worker.class, w.getId() );
+ s.delete( w );
+ c = (Construction) s.get( Construction.class, c.getId() );
+ s.delete( c );
+ tx.commit();
+ s.close();
+ count++;
+ } catch (Throwable t) {
+ t.printStackTrace( );
+ }
+ }
+ }
+
+ protected static class ReverseWork implements Runnable {
+ private SessionFactory sf;
+
+ public ReverseWork(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ try {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ Worker w = new Worker( "Mladen", 70 );
+ s.persist( w );
+ Construction c = new Construction( "Hover Dam", "Croatia" );
+ s.persist( c );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ w = (Worker) s.get( Worker.class, w.getId() );
+ w.setName( "Remi" );
+ c = (Construction) s.get( Construction.class, c.getId() );
+ c.setName( "Palais des festivals" );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ w = (Worker) s.get( Worker.class, w.getId() );
+ s.delete( w );
+ c = (Construction) s.get( Construction.class, c.getId() );
+ s.delete( c );
+ tx.commit();
+ s.close();
+ } catch (Throwable t) {
+ t.printStackTrace( );
+ }
+ }
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Worker.class,
+ Construction.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/OptimizerPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Worker.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Worker.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Worker.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,83 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.optimizer;
+
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Worker {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.UN_TOKENIZED)
+ private int workhours;
+
+
+ public Worker() {
+ }
+
+ public Worker(String name, int workhours) {
+ this.name = name;
+ this.workhours = workhours;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public int getWorkhours() {
+ return workhours;
+ }
+
+ public void setWorkhours(int workhours) {
+ this.workhours = workhours;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/optimizer/Worker.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/Boat.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/Boat.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/Boat.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.perf;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Boat {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Integer id;
+ @Field(store= Store.YES)
+ public String name;
+ @Field
+ public String description;
+
+ public Boat() {}
+
+ public Boat(String name, String description) {
+ this.name = name;
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/Boat.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/IndexTestDontRun.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/IndexTestDontRun.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/IndexTestDontRun.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,122 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.perf;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import junit.textui.TestRunner;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.Directory;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class IndexTestDontRun extends SearchTestCase {
+
+ private static final int TOTAL_SEARCHES = 800;
+ private static final int SEARCH_THREADS = 100;
+
+ public static void main(String[] args) {
+ //isLucene = Boolean.parseBoolean( args[0] );
+ TestRunner.run( IndexTestDontRun.class );
+ }
+
+ public void notestInit() throws Exception {
+ long time = System.currentTimeMillis();
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ for (int i = 0; i < 50000; i++) {
+ s.save( new Boat( "Maria el Seb", "a long" + i + " description of the land" + i ) );
+ }
+ tx.commit();
+ s.close();
+ System.out.println( " init time = " + ( System.currentTimeMillis() - time ) );
+ }
+
+ public void testPerformance() throws Exception {
+ measure(true);//JVM warmup
+ measure(false);//JVM warmup
+ long measureLucene = measure( true );
+ long measureSearch = measure( false );
+ System.out.println( "Totaltime Lucene = " + measureLucene );
+ System.out.println( "Totaltime Search = " + measureSearch );
+ }
+
+ public long measure(boolean plainLucene) throws Exception {
+ ThreadPoolExecutor threadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool( SEARCH_THREADS );
+ threadPool.prestartAllCoreThreads();
+ CountDownLatch startSignal = new CountDownLatch(1);
+ List<SearcherThread> threadsList = new ArrayList<SearcherThread>( TOTAL_SEARCHES );
+ IndexSearcher indexSearcher = getNewSearcher();
+ for (int i = 0; i < TOTAL_SEARCHES; i++) {
+ // Create a thread and invoke it
+ //if ( i % 100 == 0) indexSearcher = getNewSearcher();
+ SearcherThread searcherThread = new SearcherThread( i, "name:maria OR description:long" + i, getSessions(), indexSearcher, plainLucene, startSignal );
+ threadsList.add( searcherThread );
+ threadPool.execute( searcherThread );
+ }
+ threadPool.shutdown();//required to enable awaitTermination functionality
+ startSignal.countDown();//start all created threads
+ boolean terminationOk = threadPool.awaitTermination( 60, TimeUnit.SECONDS );
+ if ( !terminationOk ) {
+ System.out.println( "Not enough time to complete the tests!" );
+ return 0;
+ }
+ long totalTime = 0;
+ for (SearcherThread t : threadsList) totalTime += t.getTime();
+ return totalTime;
+ }
+
+ private IndexSearcher getNewSearcher() throws IOException {
+ final org.hibernate.classic.Session session = getSessions().openSession();
+ Directory d = Search.getFullTextSession( session ).getSearchFactory().getDirectoryProviders( Boat.class )[0].getDirectory();
+ IndexSearcher indexsearcher = new IndexSearcher( d, true );
+ return indexsearcher;
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Boat.class
+ };
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+// cfg.setProperty( "hibernate.search.reader.strategy", DumbSharedReaderProvider.class.getName() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/IndexTestDontRun.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/SearcherThread.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/SearcherThread.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/SearcherThread.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,174 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.perf;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.document.Document;
+import org.slf4j.Logger;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SearcherThread implements Runnable {
+ private static final Logger log = LoggerFactory.make();
+ private final int threadId;
+ private final String queryString;
+ private final SessionFactory sf;
+ private final IndexSearcher indexsearcher;
+ private final boolean isLucene;
+ private final CountDownLatch startSignal;
+ private long time;
+
+ /**
+ * Initializes the searcher thread with its id, query string, session factory,
+ * index searcher, search mode (plain Lucene vs. Hibernate Search) and start latch.
+ */
+ public SearcherThread(int threadId, String queryString, SessionFactory sf, IndexSearcher indexSearcher, boolean isLucene, CountDownLatch startSignal) {
+ this.isLucene = isLucene;
+ this.threadId = threadId;
+ this.queryString = queryString;
+ this.sf = sf;
+ this.indexsearcher = indexSearcher;
+ this.startSignal = startSignal;
+ }
+
+ /**
+ * @see java.lang.Runnable#run()
+ */
+ public void run() {
+ try {
+ startSignal.await();
+ } catch (InterruptedException e) {
+ log.error( "tests canceled", e );
+ return;
+ }
+ if ( isLucene ) {
+ runLucene();
+ }
+ else {
+ runHSearch();
+ }
+ }
+
+ public void runLucene() {
+ try {
+ Query q = getQuery();
+ long start = System.currentTimeMillis();
+ // Search
+ TopDocs hits = indexsearcher.search( q, 1000 );
+ List<String> names = new ArrayList<String>(100);
+ for (int i = 0 ; i < 100 ; i++) {
+ Document doc = getDocument( indexsearcher, hits.scoreDocs[i].doc );
+ names.add( doc.get( "name" ) );
+ }
+ int resultSize = hits.totalHits;
+ long totalTime = System.currentTimeMillis() - start;
+// log.error( "Lucene [ Thread-id : " + threadId + " ] Total time taken for search is : " + totalTime + "ms with total no. of matching records : " + hits.length() );
+ setTime( totalTime );
+ }
+ catch (ParseException e) {
+ System.out.println( "[ Thread-id : " + threadId + " ] Parse Exception for queryString : " + queryString );
+ e.printStackTrace();
+ }
+ catch (IOException e) {
+ System.out.println( "[ Thread-id : " + threadId + " ] IO Exception for queryString : " + queryString );
+ }
+ catch (Exception e) {
+ e.printStackTrace( );
+ }
+ }
+
+ private Document getDocument(Searcher searcher, int docId ) {
+ try {
+ return searcher.doc( docId );
+ } catch (IOException ioe) {
+ throw new SearchException( "Unable to retrieve document", ioe );
+ }
+ }
+
+ private Query getQuery() throws ParseException {
+ QueryParser qp = new QueryParser( SearchTestCase.getTargetLuceneVersion(), "t", SearchTestCase.standardAnalyzer );
+ qp.setLowercaseExpandedTerms( true );
+ // Parse the query
+ Query q = qp.parse( queryString );
+ if ( q instanceof BooleanQuery ) {
+ BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
+ }
+ return q;
+ }
+
+ public void runHSearch() {
+ try {
+ Query q = getQuery();
+ // Search
+ FullTextSession ftSession = Search.getFullTextSession( sf.openSession( ) );
+ final FullTextQuery textQuery = ftSession.createFullTextQuery( q, Boat.class )
+ .setMaxResults( 100 ).setProjection( "name" );
+ long start = System.currentTimeMillis();
+ List results = textQuery.list();
+ int resultSize = textQuery.getResultSize();
+ long totalTime = System.currentTimeMillis() - start;
+ ftSession.close();
+// log.error( "HSearch [ Thread-id : " + threadId + " ] Total time taken for search is : " + totalTime + "ms with total no. of matching records : " + resultSize );
+ setTime( totalTime );
+ }
+ catch (ParseException e) {
+ log.error( "[ Thread-id : " + threadId + " ] Parse Exception for queryString : " + queryString );
+ e.printStackTrace();
+ }
+ catch (Throwable e) {
+ log.error( "[ Thread-id : " + threadId + " ] Exception for queryString : " + queryString );
+ e.printStackTrace( );
+ }
+ }
+
+ public synchronized long getTime() {
+ return time;
+ }
+
+ public synchronized void setTime(long time) {
+ this.time = time;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/perf/SearcherThread.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/AlternateBook.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/AlternateBook.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/AlternateBook.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="Book")
+public class AlternateBook {
+ @Id @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String summary;
+
+
+ public AlternateBook() {
+ }
+
+ public AlternateBook(Integer id, String summary) {
+ this.id = id;
+ this.summary = summary;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/AlternateBook.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Author.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Author.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Author.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Author {
+ @Id @GeneratedValue @DocumentId
+ private Integer id;
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Field(index = Index.TOKENIZED, store = Store.YES)
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Author.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Book.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Book.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Book.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,126 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+import javax.persistence.ManyToOne;
+
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Fields;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Resolution;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Book" )
+public class Book {
+
+ private Integer id;
+ private String body;
+ private String summary;
+ private Set<Author> authors = new HashSet<Author>();
+ private Author mainAuthor;
+ private Date publicationDate;
+
+ @IndexedEmbedded
+ @ManyToOne
+ public Author getMainAuthor() {
+ return mainAuthor;
+ }
+
+ public void setMainAuthor(Author mainAuthor) {
+ this.mainAuthor = mainAuthor;
+ }
+
+ @ManyToMany
+ public Set<Author> getAuthors() {
+ return authors;
+ }
+
+ public void setAuthors(Set<Author> authors) {
+ this.authors = authors;
+ }
+
+ public Book() {
+ }
+
+ public Book(Integer id, String summary, String body) {
+ this.id = id;
+ this.summary = summary;
+ this.body = body;
+ }
+
+ @Field(index=Index.TOKENIZED, store=Store.NO)
+ public String getBody() {
+ return body;
+ }
+
+ public void setBody(String body) {
+ this.body = body;
+ }
+
+ @Id @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Fields( {
+ @Field(index = Index.TOKENIZED, store = Store.YES),
+ @Field(name = "summary_forSort", index = Index.UN_TOKENIZED, store = Store.YES)
+ } )
+ public String getSummary() {
+ return summary;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ @Field(index=Index.UN_TOKENIZED, store=Store.YES)
+ @DateBridge(resolution=Resolution.SECOND)
+ public Date getPublicationDate() {
+ return publicationDate;
+ }
+
+ public void setPublicationDate(Date publicationDate) {
+ this.publicationDate = publicationDate;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Book.java
___________________________________________________________________
Name: svn:keywords
+ Id
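A note on the Book mapping above: the @Fields annotation indexes the summary property twice, a tokenized "summary" field for full-text matching and an untokenized "summary_forSort" field intended purely for sorting, while @DateBridge indexes publicationDate at second resolution. A minimal query-side sketch of how that dual mapping is meant to be used (illustration only, not part of this changeset; it assumes an already open FullTextSession named "session"):

    import java.util.List;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TermQuery;
    import org.hibernate.search.FullTextQuery;

    // match against the tokenized field...
    TermQuery luceneQuery = new TermQuery( new Term( "summary", "lucene" ) );
    FullTextQuery query = session.createFullTextQuery( luceneQuery, Book.class );
    // ...but sort on the untokenized companion field, which holds the summary as a single term
    query.setSort( new Sort( new SortField( "summary_forSort", SortField.STRING ) ) );
    List books = query.list();

The same pattern is exercised by LuceneQuerySortTest later in this commit.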
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Clock.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Clock.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Clock.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,70 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Clock {
+ private Integer id;
+ private String brand;
+
+ public Clock() {
+ }
+
+ public Clock(Integer id, String brand) {
+ this.id = id;
+ this.brand = brand;
+ }
+
+ @Field(index= Index.TOKENIZED, store= Store.YES)
+ public String getBrand() {
+ return brand;
+ }
+
+ public void setBrand(String brand) {
+ this.brand = brand;
+ }
+
+ @Id @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Clock.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ElectricalProperties.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ElectricalProperties.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ElectricalProperties.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,73 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.TermVector;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+public class ElectricalProperties {
+ private int id;
+ private String content;
+
+ public ElectricalProperties() {
+
+ }
+
+ public ElectricalProperties(int id, String content) {
+ this.id = id;
+ this.content = content;
+ }
+
+ @Field( index = Index.TOKENIZED, store = Store.YES, termVector = TermVector.WITH_POSITION_OFFSETS )
+ public String getContent() {
+ return content;
+ }
+
+ public void setContent(String content) {
+ this.content = content;
+ }
+
+ @Id
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ElectricalProperties.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Employee.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Employee.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Employee.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,97 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.Date;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.DateBridge;
+import org.hibernate.search.annotations.Resolution;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+public class Employee {
+ private Integer id;
+ private String lastname;
+ private String dept;
+
+ @Field(store=Store.YES, index = Index.UN_TOKENIZED)
+ @DateBridge(resolution = Resolution.DAY)
+ public Date getHireDate() {
+ return hireDate;
+ }
+
+ public void setHireDate(Date hireDate) {
+ this.hireDate = hireDate;
+ }
+
+ private Date hireDate;
+
+ public Employee() {
+ }
+
+ public Employee(Integer id, String lastname, String dept) {
+ this.id = id;
+ this.lastname = lastname;
+ this.dept = dept;
+ }
+
+ @Id
+ @DocumentId
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Field( index = Index.NO, store = Store.YES )
+ public String getLastname() {
+ return lastname;
+ }
+
+ public void setLastname(String lastname) {
+ this.lastname = lastname;
+ }
+
+ @Field( index = Index.TOKENIZED, store = Store.YES )
+ public String getDept() {
+ return dept;
+ }
+
+ public void setDept(String dept) {
+ this.dept = dept;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Employee.java
___________________________________________________________________
Name: svn:keywords
+ Id
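For the Employee mapping above: hireDate goes through @DateBridge with day resolution, and lastname is stored but not indexed, so it can only be read back via projection (which is what the scrolling tests later in this commit do with setProjection( "id", "lastname", "dept" )). The built-in date bridges use Lucene's DateTools-style string encoding, so a small sketch of what day resolution actually puts in the index (illustration only, not part of this changeset):

    import java.util.Date;
    import org.apache.lucene.document.DateTools;

    // Day resolution produces a lexicographically sortable token such as "20100316",
    // which is why sorting and range queries on the field work as expected.
    String indexedForm = DateTools.dateToString( new Date(), DateTools.Resolution.DAY );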
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQuerySortTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQuerySortTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQuerySortTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,166 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.List;
+import java.util.Calendar;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.test.SearchTestCase;
+
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class LuceneQuerySortTest extends SearchTestCase {
+
+ /**
+ * Test that we can change the default sort order of the lucene search result.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testList() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ createTestBooks(s);
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:lucene" );
+ FullTextQuery hibQuery = s.createFullTextQuery( query, Book.class );
+ List<Book> result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of test results.", 3, result.size() );
+ // make sure that the books are returned in the order in which they were
+ // inserted into the index.
+ int id = 1;
+ for(Book b : result) {
+ assertEquals( "Expected another id", Integer.valueOf( id ), b.getId() );
+ id++;
+ }
+
+ // now the same query, but with a lucene sort specified.
+ query = parser.parse( "summary:lucene" );
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ Sort sort = new Sort( new SortField( "id", SortField.STRING, true ) );
+ hibQuery.setSort(sort);
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of test results.", 3, result.size() );
+ id = 3;
+ for (Book b : result) {
+ assertEquals( "Expected another id", Integer.valueOf( id ), b.getId() );
+ id--;
+ }
+
+ // order by summary
+ query = parser.parse( "summary:lucene OR summary:action" );
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ sort = new Sort( new SortField( "summary_forSort", SortField.STRING ) ); //ASC
+ hibQuery.setSort( sort );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of test results.", 4, result.size() );
+ assertEquals( "Groovy in Action", result.get( 0 ).getSummary() );
+
+ // order by summary backwards
+ query = parser.parse( "summary:lucene OR summary:action" );
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ sort = new Sort( new SortField( "summary_forSort", SortField.STRING, true ) ); //DESC
+ hibQuery.setSort( sort );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of test results.", 4, result.size() );
+ assertEquals( "Hibernate & Lucene", result.get( 0 ).getSummary() );
+
+ // order by date backwards
+ query = parser.parse( "summary:lucene OR summary:action" );
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ sort = new Sort( new SortField( "publicationDate", SortField.STRING, true ) ); //DESC
+ hibQuery.setSort( sort );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Wrong number of test results.", 4, result.size() );
+ for (Book book : result) {
+ System.out.println(book.getSummary() + " : " + book.getPublicationDate() );
+ }
+ assertEquals( "Groovy in Action", result.get( 0 ).getSummary() );
+
+ tx.commit();
+
+ deleteTestBooks(s);
+ s.close();
+ }
+
+ /**
+ * Helper method creating three books with the same title and summary, plus a
+ * fourth, different book. When searching for the first three, the results should
+ * be returned in the order in which they were added to the index.
+ *
+ * @param s The full text session used to index the test data.
+ */
+ private void createTestBooks(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ Calendar cal = Calendar.getInstance( );
+ cal.set( 2007, 7, 25, 11, 20, 30);
+ Book book = new Book(1, "Hibernate & Lucene", "This is a test book.");
+ book.setPublicationDate( cal.getTime() );
+ s.save(book);
+ cal.add( Calendar.SECOND, 1 );
+ book = new Book(2, "Hibernate & Lucene", "This is a test book.");
+ book.setPublicationDate( cal.getTime() );
+ s.save(book);
+ cal.add( Calendar.SECOND, 1 );
+ book = new Book(3, "Hibernate & Lucene", "This is a test book.");
+ book.setPublicationDate( cal.getTime() );
+ s.save(book);
+ cal.add( Calendar.SECOND, 1 );
+ book = new Book(4, "Groovy in Action", "The bible of Groovy");
+ book.setPublicationDate( cal.getTime() );
+ s.save(book);
+ tx.commit();
+ s.clear();
+ }
+
+ private void deleteTestBooks(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ s.createQuery( "delete " + Book.class.getName() ).executeUpdate();
+ tx.commit();
+ s.clear();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Author.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQuerySortTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQueryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQueryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQueryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,683 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.FetchMode;
+import org.hibernate.Hibernate;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.stat.Statistics;
+
+
+/**
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ */
+public class LuceneQueryTest extends SearchTestCase {
+
+ public void testList() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( 0, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter", 2, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with one class filter", 1, result.size() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no class filter", 2, result.size() );
+ for (Object element : result) {
+ assertTrue( Hibernate.isInitialized( element ) );
+ s.delete( element );
+ }
+ s.flush();
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with delete objects", 0, result.size() );
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testResultSize() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:Festina Or brand:Seiko" );
+ Statistics stats = s.getSessionFactory().getStatistics();
+ stats.clear();
+ boolean enabled = stats.isStatisticsEnabled();
+ if ( !enabled ) stats.setStatisticsEnabled( true );
+ FullTextQuery hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ assertEquals( "Exection of getResultSize without actual results", 2, hibQuery.getResultSize() );
+ assertEquals( "No entity should be loaded", 0, stats.getEntityLoadCount() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "2 entities should be loaded", 2, stats.getEntityLoadCount() );
+ if ( !enabled ) stats.setStatisticsEnabled( false );
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFirstMax() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:Festina Or brand:Seiko" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ hibQuery.setFirstResult( 1 );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "first result no max result", 1, result.size() );
+
+ hibQuery.setFirstResult( 0 );
+ hibQuery.setMaxResults( 1 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "max result set", 1, result.size() );
+
+ hibQuery.setFirstResult( 0 );
+ hibQuery.setMaxResults( 3 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "max result out of limit", 2, result.size() );
+
+ hibQuery.setFirstResult( 2 );
+ hibQuery.setMaxResults( 3 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "first result out of limit", 0, result.size() );
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testIterator() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ Iterator result = hibQuery.iterate();
+ assertNotNull( result );
+ assertFalse( result.hasNext() );
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.iterate();
+ assertNotNull( result );
+ int index = 0;
+ while ( result.hasNext() ) {
+ index++;
+ s.delete( result.next() );
+ }
+ assertEquals( 2, index );
+
+ s.flush();
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.iterate();
+ assertNotNull( result );
+ assertFalse( result.hasNext() );
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testScrollableResultSet() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ clock = new Clock( 2, "Festina" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ book = new Book( 2, "La gloire de mon père", "Les deboires de mon père en vélo" );
+ s.save( book );
+ tx.commit();//post commit events for lucene
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:noword" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ ScrollableResults result = hibQuery.scroll();
+ assertNotNull( result );
+ assertEquals( -1, result.getRowNumber() );
+ assertEquals( false, result.next() );
+ result.close();
+
+ query = parser.parse( "summary:Festina Or brand:Seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ result = hibQuery.scroll();
+ assertEquals( -1, result.getRowNumber() );
+ result.beforeFirst();
+ assertEquals( true, result.next() );
+ assertTrue( result.isFirst() );
+ assertTrue( result.scroll( 1 ) );
+ assertTrue( result.isLast() );
+ assertFalse( result.scroll( 1 ) );
+ result.beforeFirst();
+ while ( result.next() ) {
+ s.delete( result.get()[0] );
+ }
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ // Technically this is checked by other tests but let's do it anyway. J.G.
+ public void testDefaultFetchSize() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+
+ ScrollableResults projections = hibQuery.scroll();
+ projections.beforeFirst();
+ Object[] projection = projections.get();
+ assertNull( projection );
+
+ projections.next();
+ assertTrue( projections.isFirst() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFetchSizeLargerThanHits() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+ hibQuery.setFetchSize( 6 );
+
+ ScrollableResults results = hibQuery.scroll();
+ results.beforeFirst();
+ results.next();
+ Object[] result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1000, result[0] );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFetchSizeDefaultFirstAndMax() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+ hibQuery.setFetchSize( 3 );
+
+ ScrollableResults results = hibQuery.scroll();
+ results.beforeFirst();
+ results.next();
+ Object[] result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1000, result[0] );
+ results.scroll( 2 );
+ result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1003, result[0] );
+ // check cache addition
+ results.next();
+ result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1004, result[0] );
+
+ results.scroll( -2 );
+ result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1002, result[0] );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFetchSizeNonDefaultFirstAndMax() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+ hibQuery.setFetchSize( 3 );
+ hibQuery.setFirstResult( 1 );
+ hibQuery.setMaxResults( 3 );
+
+ ScrollableResults results = hibQuery.scroll();
+ results.beforeFirst();
+ results.next();
+ Object[] result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1002, result[0] );
+
+ results.scroll( 2 );
+ result = results.get();
+ assertEquals( "incorrect entityInfo returned", 1004, result[0] );
+
+ results.next();
+ result = results.get();
+ assertNull( result );
+
+ results.scroll( -8 );
+ result = results.get();
+ assertNull( result );
+
+ // And test a bad forward scroll.
+ results.scroll( 10 );
+ result = results.get();
+ assertNull( result );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testFetchSizeNonDefaultFirstAndMaxNoHits() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+ hibQuery.setFetchSize( 3 );
+ hibQuery.setFirstResult( 1 );
+ hibQuery.setMaxResults( 3 );
+
+ ScrollableResults results = hibQuery.scroll();
+ results.beforeFirst();
+ Object[] result = results.get();
+ assertNull( "non-null entity infos returned", result );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testCurrent() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept" );
+
+
+
+ ScrollableResults results = hibQuery.scroll();
+ results.beforeFirst();
+ results.next();
+ assertTrue("beforeFirst() pointer incorrect", results.isFirst());
+
+ results.afterLast();
+ results.previous();
+ assertTrue("afterLast() pointer incorrect", results.isLast());
+
+ // Let's see if a bad reverse scroll screws things up
+ results.scroll( -8 );
+ results.next();
+ assertTrue("large negative scroll() pointer incorrect", results.isFirst());
+
+ // And test a bad forward scroll.
+ results.scroll( 10 );
+ results.previous();
+ assertTrue("large positive scroll() pointer incorrect", results.isLast());
+
+ // Finally, let's test a REAL screwup.
+ hibQuery.setFirstResult( 3 );
+ hibQuery.setMaxResults( 1 );
+
+ results = hibQuery.scroll();
+ results.first();
+ Object[] result = results.get();
+ assertEquals(1004, result[0]);
+
+ results.last();
+ result = results.get();
+ assertEquals(1004, result[0]);
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testMultipleEntityPerIndex() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Clock clock = new Clock( 1, "Seiko" );
+ s.save( clock );
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ AlternateBook alternateBook = new AlternateBook( 1, "La chute de la petite reine a travers les yeux de Festina" );
+ s.save( alternateBook );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:Festina" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter", 1, result.size() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ Iterator it = hibQuery.iterate();
+ assertTrue( it.hasNext() );
+ assertNotNull( it.next() );
+ assertFalse( it.hasNext() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ ScrollableResults sr = hibQuery.scroll();
+ assertTrue( sr.first() );
+ assertNotNull( sr.get() );
+ assertFalse( sr.next() );
+ sr.close();
+
+ query = parser.parse( "summary:Festina OR brand:seiko" );
+ hibQuery = s.createFullTextQuery( query, Clock.class, Book.class );
+ hibQuery.setMaxResults( 2 );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit class filter and limit", 2, result.size() );
+
+ query = parser.parse( "summary:Festina" );
+ hibQuery = s.createFullTextQuery( query );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no class filter", 2, result.size() );
+ for (Object element : result) {
+ assertTrue( Hibernate.isInitialized( element ) );
+ s.delete( element );
+ }
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testCriteria() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Book book = new Book( 1, "La chute de la petite reine a travers les yeux de Festina", "La chute de la petite reine a travers les yeux de Festina, blahblah" );
+ s.save( book );
+ Author emmanuel = new Author();
+ emmanuel.setName( "Emmanuel" );
+ s.save( emmanuel );
+ book.getAuthors().add( emmanuel );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:Festina" );
+ org.hibernate.Query hibQuery = s.createFullTextQuery( query, Book.class );
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no explicit criteria", 1, result.size() );
+ book = (Book) result.get( 0 );
+ assertFalse( "Association should not be inintialized", Hibernate.isInitialized( book.getAuthors() ) );
+
+ result = s.createFullTextQuery( query ).setCriteriaQuery(
+ s.createCriteria( Book.class ).setFetchMode( "authors", FetchMode.JOIN ) ).list();
+ assertNotNull( result );
+ assertEquals( "Query with explicit criteria", 1, result.size() );
+ book = (Book) result.get( 0 );
+ assertTrue( "Association should be inintialized", Hibernate.isInitialized( book.getAuthors() ) );
+ assertEquals( 1, book.getAuthors().size() );
+
+ //cleanup
+ Author author = book.getAuthors().iterator().next();
+ book.getAuthors().remove( author );
+
+ for (Object element : s.createQuery( "from java.lang.Object" ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testScrollEmptyHits() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+
+ ScrollableResults projections = hibQuery.scroll();
+ projections.beforeFirst();
+ projections.next();
+ Object[] projection = projections.get();
+ assertNull( projection );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+
+ projections = hibQuery.scroll();
+ projections.beforeFirst();
+ projections.next();
+ projection = projections.get();
+ assertNull( projection );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testListEmptyHits() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ List result = hibQuery.list();
+ assertEquals( 0, result.size() );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+ result = hibQuery.list();
+ assertEquals( 0, result.size() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ public void testIterateEmptyHits() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:XXX" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ Iterator iter = hibQuery.iterate();
+ assertFalse( iter.hasNext() );
+
+ hibQuery = s.createFullTextQuery( query, Employee.class ).setFirstResult( 10 ).setMaxResults( 20 );
+ iter = hibQuery.iterate();
+ assertFalse( iter.hasNext() );
+
+ //cleanup
+ for (Object element : s.createQuery( "from " + Employee.class.getName() ).list()) s.delete( element );
+ tx.commit();
+ s.close();
+ }
+
+ private void prepEmployeeIndex(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ Employee e1 = new Employee( 1000, "Griffin", "ITech" );
+ s.save( e1 );
+ Employee e2 = new Employee( 1001, "Jackson", "Accounting" );
+ s.save( e2 );
+ Employee e3 = new Employee( 1002, "Jimenez", "ITech" );
+ s.save( e3 );
+ Employee e4 = new Employee( 1003, "Stejskal", "ITech" );
+ s.save( e4 );
+ Employee e5 = new Employee( 1004, "Whetbrook", "ITech" );
+ s.save( e5 );
+
+ tx.commit();
+ }
+
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ AlternateBook.class,
+ Clock.class,
+ Author.class,
+ Employee.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/LuceneQueryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/MultiClassesQueryLoaderTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/MultiClassesQueryLoaderTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/MultiClassesQueryLoaderTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.sql.Statement;
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MultiClassesQueryLoaderTest extends SearchTestCase {
+
+ public void testObjectNotFound() throws Exception {
+ Session sess = openSession();
+ Transaction tx = sess.beginTransaction();
+ Author author = new Author();
+ author.setName( "Moo Cow" );
+ sess.persist( author );
+
+ tx.commit();
+ sess.clear();
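+ // delete the Author row directly via JDBC so the Lucene index keeps a stale reference to it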
+ Statement statement = sess.connection().createStatement();
+ statement.executeUpdate( "DELETE FROM Author" );
+ statement.close();
+ FullTextSession s = Search.getFullTextSession( sess );
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.keywordAnalyzer );
+ Query query = parser.parse( "name:moo" );
+ FullTextQuery hibQuery = s.createFullTextQuery( query, Author.class, Music.class );
+ List result = hibQuery.list();
+ assertEquals( "Should have returned no author", 0, result.size() );
+
+ for (Object o : s.createCriteria( Object.class ).list()) {
+ s.delete( o );
+ }
+
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Author.class,
+ Music.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/MultiClassesQueryLoaderTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Music.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Music.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Music.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,103 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.ManyToMany;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
+
+@Entity
+@Indexed()
+public class Music {
+ protected Long id;
+ protected String title;
+ protected Set<Author> authors = new HashSet<Author>();
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ /**
+ * @return the authors
+ */
+ @ManyToMany( cascade = CascadeType.ALL,
+ fetch = FetchType.EAGER,
+ targetEntity = Author.class )
+ @IndexedEmbedded( depth = 1 )
+ public Set<Author> getAuthors() {
+ return authors;
+ }
+
+ /**
+ * @param authors the authors to set
+ */
+ public void setAuthors(Set<Author> authors) {
+ this.authors = authors;
+ }
+
+ public void addAuthor(Author author) {
+ this.getAuthors().add( author );
+ }
+
+ /**
+ * @return the title
+ */
+ @Column( name = "title",
+ length = 255,
+ nullable = false )
+ @Field( name = "title",
+ index = Index.TOKENIZED,
+ store = Store.YES )
+ public String getTitle() {
+ return title;
+ }
+
+ /**
+ * @param title the title to set
+ */
+ public void setTitle(String title) {
+ this.title = title;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Music.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Person.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Person.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Person.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,55 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+public class Person {
+ @Id @GeneratedValue
+ private Integer id;
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/Person.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionQueryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionQueryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionQueryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,496 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.io.Serializable;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Date;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Tests several aspects of projection queries.
+ *
+ * @author Emmanuel Bernard
+ * @author John Griffin
+ * @author Hardy Ferentschik
+ */
+public class ProjectionQueryTest extends SearchTestCase {
+
+ /**
+ * HSEARCH-296
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testClassProjection() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( FullTextQuery.OBJECT_CLASS );
+
+ List result = hibQuery.list();
+ assertNotNull( result );
+
+ Object[] projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+ assertEquals( "Wrong projected class", Employee.class, projection[0] );
+
+ tx.commit();
+ s.close();
+ }
+
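+ // Scrolls over a projection query and checks the projected values at several cursor positions
+ // (first, last, relative scroll and out-of-range positions).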
+ public void testLuceneObjectsProjectionWithScroll() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ // Is the 'FullTextQuery.ID' value correct here? Do we want the Lucene internal document number?
+ hibQuery.setProjection(
+ "id",
+ "lastname",
+ "dept",
+ FullTextQuery.THIS,
+ FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT,
+ FullTextQuery.ID
+ );
+
+ ScrollableResults projections = hibQuery.scroll();
+
+ // There are a lot of methods to check in ScrollableResultsImpl,
+ // so we use helper methods to verify each projection as needed.
+
+ projections.beforeFirst();
+ projections.next();
+ Object[] projection = projections.get();
+ checkProjectionFirst( projection, s );
+ assertTrue( projections.isFirst() );
+
+ projections.last();
+ projection = projections.get();
+ checkProjectionLast( projection, s );
+ assertTrue( projections.isLast() );
+
+ projections.next();
+ projection = projections.get();
+ assertNull( projection );
+
+ projections.previous();
+ projection = projections.get();
+ checkProjectionLast( projection, s );
+
+ projections.first();
+ projection = projections.get();
+ checkProjectionFirst( projection, s );
+
+ projections.scroll( 2 );
+ projection = projections.get();
+ checkProjection2( projection, s );
+
+ projections.scroll( -5 );
+ projection = projections.get();
+ assertNull( projection );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testResultTransformToDelimString() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE, FullTextQuery.ID );
+ hibQuery.setResultTransformer( new ProjectionToDelimStringResultTransformer() );
+
+ @SuppressWarnings("unchecked")
+ List<String> result = hibQuery.list();
+ assertTrue( "incorrect transformation", result.get( 0 ).startsWith( "1000, Griffin, ITech" ) );
+ assertTrue( "incorrect transformation", result.get( 1 ).startsWith( "1002, Jimenez, ITech" ) );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testResultTransformMap() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection(
+ "id",
+ "lastname",
+ "dept",
+ FullTextQuery.THIS,
+ FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT,
+ FullTextQuery.ID
+ );
+
+ hibQuery.setResultTransformer( new ProjectionToMapResultTransformer() );
+
+ List transforms = hibQuery.list();
+ Map map = ( Map ) transforms.get( 1 );
+ assertEquals( "incorrect transformation", "ITech", map.get( "dept" ) );
+ assertEquals( "incorrect transformation", 1002, map.get( "id" ) );
+ assertTrue( "incorrect transformation", map.get( FullTextQuery.DOCUMENT ) instanceof Document );
+ assertEquals(
+ "incorrect transformation", "1002", ( ( Document ) map.get( FullTextQuery.DOCUMENT ) ).get( "id" )
+ );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ private void checkProjectionFirst(Object[] projection, Session s) {
+ assertEquals( "id incorrect", 1000, projection[0] );
+ assertEquals( "lastname incorrect", "Griffin", projection[1] );
+ assertEquals( "dept incorrect", "ITech", projection[2] );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
+ assertTrue( "SCORE incorrect", projection[4] instanceof Float );
+ assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
+ assertEquals( "legacy ID incorrect", 1000, projection[6] );
+ }
+
+ private void checkProjectionLast(Object[] projection, Session s) {
+ assertEquals( "id incorrect", 1004, projection[0] );
+ assertEquals( "lastname incorrect", "Whetbrook", projection[1] );
+ assertEquals( "dept incorrect", "ITech", projection[2] );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
+ assertTrue( "SCORE incorrect", projection[4] instanceof Float );
+ assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
+ assertEquals( "legacy ID incorrect", 1004, projection[6] );
+ }
+
+ private void checkProjection2(Object[] projection, Session s) {
+ assertEquals( "id incorrect", 1003, projection[0] );
+ assertEquals( "lastname incorrect", "Stejskal", projection[1] );
+ assertEquals( "dept incorrect", "ITech", projection[2] );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
+ assertTrue( "SCORE incorrect", projection[4] instanceof Float );
+ assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
+ assertEquals( "legacy ID incorrect", 1003, projection[6] );
+ }
+
+ public void testLuceneObjectsProjectionWithIterate() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:ITech" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection(
+ "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT, FullTextQuery.ID
+ );
+
+ int counter = 0;
+
+ for ( Iterator iter = hibQuery.iterate(); iter.hasNext(); ) {
+ Object[] projection = ( Object[] ) iter.next();
+ assertNotNull( projection );
+ counter++;
+ assertEquals( "dept incorrect", "ITech", projection[2] );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
+ assertTrue( "SCORE incorrect", projection[4] instanceof Float );
+ assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 4, ( ( Document ) projection[5] ).getFields().size() );
+ }
+ assertEquals( "incorrect number of results returned", 4, counter );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testLuceneObjectsProjectionWithList() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:Accounting" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection(
+ "id", "lastname", "dept", FullTextQuery.THIS, FullTextQuery.SCORE,
+ FullTextQuery.DOCUMENT, FullTextQuery.ID, FullTextQuery.DOCUMENT_ID
+ );
+
+ List result = hibQuery.list();
+ assertNotNull( result );
+
+ Object[] projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+ assertEquals( "id incorrect", 1001, projection[0] );
+ assertEquals( "last name incorrect", "Jackson", projection[1] );
+ assertEquals( "dept incorrect", "Accounting", projection[2] );
+ assertEquals( "THIS incorrect", "Jackson", ( ( Employee ) projection[3] ).getLastname() );
+ assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, ( Serializable ) projection[0] ) );
+ assertTrue( "SCORE incorrect", projection[4] instanceof Float );
+ assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[5] ).getFields().size() );
+ assertEquals( "ID incorrect", 1001, projection[6] );
+ assertNotNull( "Lucene internal doc id", projection[7] );
+
+ // Change the projection order and null one
+ hibQuery.setProjection(
+ FullTextQuery.DOCUMENT, FullTextQuery.THIS, FullTextQuery.SCORE, null, FullTextQuery.ID,
+ "id", "lastname", "dept", "hireDate", FullTextQuery.DOCUMENT_ID
+ );
+
+ result = hibQuery.list();
+ assertNotNull( result );
+
+ projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+
+ assertTrue( "DOCUMENT incorrect", projection[0] instanceof Document );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[0] ).getFields().size() );
+ assertEquals( "THIS incorrect", projection[1], s.get( Employee.class, ( Serializable ) projection[4] ) );
+ assertTrue( "SCORE incorrect", projection[2] instanceof Float );
+ assertNull( "BOOST not removed", projection[3] );
+ assertEquals( "ID incorrect", 1001, projection[4] );
+ assertEquals( "id incorrect", 1001, projection[5] );
+ assertEquals( "last name incorrect", "Jackson", projection[6] );
+ assertEquals( "dept incorrect", "Accounting", projection[7] );
+ assertNotNull( "Date", projection[8] );
+ assertNotNull( "Lucene internal doc id", projection[9] );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ public void testNonLoadedFieldOptmization() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ prepEmployeeIndex( s );
+
+ Transaction tx;
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "dept", SearchTestCase.standardAnalyzer );
+
+ Query query = parser.parse( "dept:Accounting" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Employee.class );
+ hibQuery.setProjection( FullTextQuery.ID, FullTextQuery.DOCUMENT );
+
+ List result = hibQuery.list();
+ assertNotNull( result );
+
+ Object[] projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+ assertEquals( "id field name not projected", 1001, projection[0] );
+ assertEquals(
+ "Document fields should not be lazy on DOCUMENT projection",
+ "Jackson", ( ( Document ) projection[1] ).getField( "lastname" ).stringValue()
+ );
+ assertEquals( "DOCUMENT size incorrect", 5, ( ( Document ) projection[1] ).getFields().size() );
+
+ // Change the projection order and null one
+ hibQuery.setProjection( FullTextQuery.THIS, FullTextQuery.SCORE, null, "lastname" );
+
+ result = hibQuery.list();
+ assertNotNull( result );
+
+ projection = ( Object[] ) result.get( 0 );
+ assertNotNull( projection );
+
+ assertTrue( "THIS incorrect", projection[0] instanceof Employee );
+ assertTrue( "SCORE incorrect", projection[1] instanceof Float );
+ assertEquals( "last name incorrect", "Jackson", projection[3] );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Employee.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ private void prepEmployeeIndex(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ Employee e1 = new Employee( 1000, "Griffin", "ITech" );
+ s.save( e1 );
+ Employee e2 = new Employee( 1001, "Jackson", "Accounting" );
+ e2.setHireDate( new Date() );
+ s.save( e2 );
+ Employee e3 = new Employee( 1002, "Jimenez", "ITech" );
+ s.save( e3 );
+ Employee e4 = new Employee( 1003, "Stejskal", "ITech" );
+ s.save( e4 );
+ Employee e5 = new Employee( 1004, "Whetbrook", "ITech" );
+ s.save( e5 );
+
+ tx.commit();
+ }
+
+ public void testProjection() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Book book = new Book(
+ 1,
+ "La chute de la petite reine a travers les yeux de Festina",
+ "La chute de la petite reine a travers les yeux de Festina, blahblah"
+ );
+ s.save( book );
+ Book book2 = new Book( 2, "Sous les fleurs il n'y a rien", null );
+ s.save( book2 );
+ Author emmanuel = new Author();
+ emmanuel.setName( "Emmanuel" );
+ s.save( emmanuel );
+ book.setMainAuthor( emmanuel );
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.stopAnalyzer );
+
+ Query query = parser.parse( "summary:Festina" );
+ org.hibernate.search.FullTextQuery hibQuery = s.createFullTextQuery( query, Book.class );
+ hibQuery.setProjection( "id", "summary", "mainAuthor.name" );
+
+ List result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( "Query with no explicit criteria", 1, result.size() );
+ Object[] projection = ( Object[] ) result.get( 0 );
+ assertEquals( "id", 1, projection[0] );
+ assertEquals( "summary", "La chute de la petite reine a travers les yeux de Festina", projection[1] );
+ assertEquals( "mainAuthor.name (embedded objects)", "Emmanuel", projection[2] );
+
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ hibQuery.setProjection( "id", "body", "mainAuthor.name" );
+
+ try {
+ hibQuery.list();
+ fail( "Projecting an unstored field should raise an exception" );
+ }
+ catch ( SearchException e ) {
+ //success
+ }
+
+
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ hibQuery.setProjection();
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( 1, result.size() );
+ assertTrue( "Should not trigger projection", result.get( 0 ) instanceof Book );
+
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ hibQuery.setProjection( (String[]) null );
+ result = hibQuery.list();
+ assertNotNull( result );
+ assertEquals( 1, result.size() );
+ assertTrue( "Should not trigger projection", result.get( 0 ) instanceof Book );
+
+ query = parser.parse( "summary:fleurs" );
+ hibQuery = s.createFullTextQuery( query, Book.class );
+ hibQuery.setProjection( "id", "summary", "mainAuthor.name" );
+ result = hibQuery.list();
+ assertEquals( 1, result.size() );
+ projection = ( Object[] ) result.get( 0 );
+ assertEquals( "mainAuthor.name", null, projection[2] );
+
+ //cleanup
+ for ( Object element : s.createQuery( "from " + Book.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ for ( Object element : s.createQuery( "from " + Author.class.getName() ).list() ) {
+ s.delete( element );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Author.class,
+ Employee.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionQueryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToDelimStringResultTransformer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToDelimStringResultTransformer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToDelimStringResultTransformer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.List;
+
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * @author John Griffin
+ */
+public class ProjectionToDelimStringResultTransformer implements ResultTransformer {
+
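+ // Flattens each projection row into a single comma-delimited String.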
+ public Object transformTuple(Object[] tuple, String[] aliases) {
+ String s = tuple[0].toString();
+ for (int i = 1; i < tuple.length; i++) {
+ s = s + ", " + tuple[i].toString();
+ }
+ return s;
+ }
+
+ public List transformList(List collection) {
+ return collection;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToDelimStringResultTransformer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToMapResultTransformer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToMapResultTransformer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToMapResultTransformer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,52 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hibernate.transform.ResultTransformer;
+
+/**
+ * @author John Griffin
+ */
+public class ProjectionToMapResultTransformer implements ResultTransformer {
+
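+ // Converts each projection row into a Map keyed by projection alias; entries with a null alias are skipped.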
+ public Object transformTuple(Object[] tuple, String[] aliases) {
+ Map result = new HashMap( tuple.length );
+ for (int i = 0; i < tuple.length; i++) {
+ String key = aliases[i];
+ if ( key != null ) {
+ result.put( key, tuple[i] );
+ }
+ }
+ return result;
+ }
+
+ public List transformList(List collection) {
+ return collection;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ProjectionToMapResultTransformer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryLoaderTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryLoaderTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryLoaderTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,118 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class QueryLoaderTest extends SearchTestCase {
+
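+ // Music.authors is an EAGER @ManyToMany; both Music results should come back
+ // from the full-text query with all four of their authors initialized.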
+ public void testWithEagerCollectionLoad() throws Exception {
+ Session sess = openSession();
+ Transaction tx = sess.beginTransaction();
+ Music music = new Music();
+ music.setTitle( "Moo Goes The Cow" );
+ Author author = new Author();
+ author.setName( "Moo Cow" );
+ music.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "Another Moo Cow" );
+ music.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "A Third Moo Cow" );
+ music.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "Random Moo Cow" );
+ music.addAuthor( author );
+ sess.persist( author );
+ sess.save( music );
+
+ Music music2 = new Music();
+ music2.setTitle( "The Cow Goes Moo" );
+ author = new Author();
+ author.setName( "Moo Cow The First" );
+ music2.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "Moo Cow The Second" );
+ music2.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "Moo Cow The Third" );
+ music2.addAuthor( author );
+ sess.persist( author );
+ author = new Author();
+ author.setName( "Moo Cow The Fourth" );
+ music2.addAuthor( author );
+ sess.persist( author );
+ sess.save( music2 );
+ tx.commit();
+ sess.clear();
+
+ FullTextSession s = Search.getFullTextSession( sess );
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "title", SearchTestCase.keywordAnalyzer );
+ Query query = parser.parse( "title:moo" );
+ FullTextQuery hibQuery = s.createFullTextQuery( query, Music.class );
+ List result = hibQuery.list();
+ assertEquals( "Should have returned 2 Books", 2, result.size() );
+ music = (Music) result.get( 0 );
+ assertEquals( "Book 1 should have four authors", 4, music.getAuthors().size() );
+ music2 = (Music) result.get( 1 );
+ assertEquals( "Book 2 should have four authors", 4, music2.getAuthors().size() );
+
+ //cleanup
+ music.getAuthors().clear();
+ music2.getAuthors().clear();
+
+ for (Object o : s.createCriteria( Object.class ).list()) {
+ s.delete( o );
+ }
+
+ tx.commit();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Author.class,
+ Music.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryLoaderTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryUnindexedEntityTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryUnindexedEntityTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryUnindexedEntityTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,74 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.HibernateException;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * HSEARCH-162 - trying to index an entity which is not marked with @Indexed
+ *
+ * @author Hardy Ferentschik
+ */
+public class QueryUnindexedEntityTest extends SearchTestCase {
+
+ public void testQueryOnAllEntities() throws Exception {
+
+ FullTextSession s = Search.getFullTextSession( openSession() );
+
+ Transaction tx = s.beginTransaction();
+ Person person = new Person();
+ person.setName( "Jon Doe" );
+ s.save( person );
+ tx.commit();
+
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "name", SearchTestCase.standardAnalyzer );
+ Query query = parser.parse( "name:foo" );
+ FullTextQuery hibQuery = s.createFullTextQuery( query );
+ try {
+ hibQuery.list();
+ fail();
+ }
+ catch ( HibernateException e ) {
+ assertTrue( "Wrong message", e.getMessage().startsWith( "There are no mapped entities" ) );
+ }
+
+ tx.rollback();
+ s.close();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Person.class,
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/QueryUnindexedEntityTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ScrollableResultsTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ScrollableResultsTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ScrollableResultsTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,242 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+import junit.framework.TestCase;
+
+/**
+ * Test for org.hibernate.search.query.ScrollableResultsImpl
+ *
+ * @see org.hibernate.search.query.ScrollableResultsImpl
+ * @author Sanne Grinovero
+ */
+public class ScrollableResultsTest extends TestCase {
+
+ private FullTextSessionBuilder builder;
+ private FullTextSession sess;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ builder = new FullTextSessionBuilder();
+ builder
+ .addAnnotatedClass( AlternateBook.class )
+ .addAnnotatedClass( Employee.class )
+ .setProperty( "hibernate.default_batch_fetch_size", "10" )
+ .build();
+ sess = builder.openFullTextSession();
+ Transaction tx = sess.beginTransaction();
+ //create some entities to query:
+ for ( int i = 0; i < 324; i++ ) {
+ sess.persist( new AlternateBook( i , "book about the number " + i ) );
+ }
+ for ( int i = 0; i < 133; i++ ) {
+ sess.persist( new Employee( i , "Rossi", "dept. num. " + i ) );
+ }
+ tx.commit();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ builder.close();
+ super.tearDown();
+ }
+
+ /**
+ * Test forward scrolling using pagination
+ */
+ public void testScrollingForward() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary", SortField.STRING ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .setFirstResult( 20 )
+ .setMaxResults( 111 )
+ .scroll();
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ assertTrue( scrollableResults.last() );
+ assertEquals( 110, scrollableResults.getRowNumber() );
+ scrollableResults.beforeFirst();
+ int position = scrollableResults.getRowNumber();
+ while ( scrollableResults.next() ) {
+ position++;
+ int bookId = position + 20;
+ assertEquals( position, scrollableResults.getRowNumber() );
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertEquals( bookId, book.getId().intValue() );
+ assertEquals( "book about the number " + bookId, book.getSummary() );
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( 110, position );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+ /**
+ * Verify inverse-order scrolling.
+ * TODO to verify correct FetchSize behavior I've been debugging
+ * the behavior; we should add a mock library to automate this kind of tests.
+ */
+ public void testScrollingBackwards() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary", SortField.STRING ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .scroll();
+ scrollableResults.beforeFirst();
+ // initial position should be -1 as in Hibernate Core
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ assertTrue( scrollableResults.last() );
+ int position = scrollableResults.getRowNumber();
+ assertEquals( 323, position );
+ while ( scrollableResults.previous() ) {
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertEquals( --position, book.getId().intValue() );
+ assertEquals( "book about the number " + position, book.getSummary() );
+ }
+ assertEquals( 0, position );
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+ /**
+ * Test that all entities returned by a ScrollableResults
+ * are always attached to the Session
+ */
+ public void testResultsAreManaged() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "summary", "number") );
+ Sort sort = new Sort( new SortField( "summary", SortField.STRING ) );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, AlternateBook.class )
+ .setSort( sort )
+ .setFetchSize( 10 )
+ .scroll();
+ int position = -1;
+ while ( scrollableResults.next() ) {
+ position++;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ // evict some entities:
+ if ( position % 3 == 0 ) {
+ sess.evict( book );
+ assertFalse( sess.contains( book ) );
+ }
+ }
+ //verifies it did scroll to the end:
+ assertEquals( 323, position );
+ //assert the entities are re-attached after eviction:
+ while ( scrollableResults.previous() ) {
+ position--;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( -1, position );
+ sess.clear();
+ //assert the entities are re-attached after Session.clear:
+ while ( scrollableResults.next() ) {
+ position++;
+ AlternateBook book = (AlternateBook) scrollableResults.get()[0];
+ assertTrue( sess.contains( book ) );
+ }
+ assertEquals( 323, position );
+ tx.commit();
+ }
+
+ /**
+ * Verify scrolling works correctly when combined with Projection
+ * and that the projected entities are managed, even when evict()
+ * is used for memory management.
+ */
+ public void testScrollProjectionAndManaged() {
+ Transaction tx = sess.beginTransaction();
+ TermQuery tq = new TermQuery( new Term( "dept", "num") );
+ ScrollableResults scrollableResults = sess
+ .createFullTextQuery( tq, Employee.class )
+ .setProjection(
+ FullTextQuery.OBJECT_CLASS,
+ FullTextQuery.ID,
+ FullTextQuery.THIS,
+ "lastname",
+ FullTextQuery.THIS
+ )
+ .setFetchSize( 10 )
+ .scroll();
+ scrollableResults.last();
+ assertEquals( 132, scrollableResults.getRowNumber() );
+ scrollableResults.beforeFirst();
+ assertEquals( -1, scrollableResults.getRowNumber() );
+ int position = scrollableResults.getRowNumber();
+ while ( scrollableResults.next() ) {
+ position++;
+ Object[] objs = scrollableResults.get();
+ assertEquals( Employee.class, objs[0] );
+ assertEquals( position, objs[1] );
+ assertTrue( objs[2] instanceof Employee );
+ assertTrue( sess.contains( objs[2] ) );
+ assertEquals( "Rossi", objs[3] );
+ assertTrue( objs[4] instanceof Employee );
+ assertTrue( sess.contains( objs[4] ) );
+ assertTrue( objs[2]==objs[4] ); //projected twice the same entity
+ // detach some objects:
+ if ( position%3 == 0 ) {
+ sess.evict( objs[2] );
+ }
+ }
+ //verify we scrolled to the end:
+ assertEquals( 132, position );
+ // and now the other way around, checking entities are attached again:
+ while ( scrollableResults.previous() ) {
+ position--;
+ Object[] objs = scrollableResults.get();
+ assertTrue( objs[2] instanceof Employee );
+ sess.contains( objs[2] );
+ assertTrue( objs[4] instanceof Employee );
+ sess.contains( objs[4] );
+ assertTrue( objs[2]==objs[4] );
+ }
+ assertEquals( -1, position );
+ scrollableResults.close();
+ tx.commit();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/ScrollableResultsTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/TermVectorTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/TermVectorTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/TermVectorTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,129 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermPositionVector;
+import org.apache.lucene.index.TermVectorOffsetInfo;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author John Griffin
+ */
+public class TermVectorTest extends SearchTestCase {
+
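+ // Reads the term vector of the first indexed document and checks term frequencies, offsets and positions.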
+ public void testPositionOffsets() throws Exception {
+ FullTextSession s = Search.getFullTextSession(openSession());
+ createIndex(s);
+
+ s.clear();
+ Transaction tx = s.beginTransaction();
+
+ // Here's how to get a reader from a FullTextSession
+ SearchFactory searchFactory = s.getSearchFactory();
+ DirectoryProvider provider = searchFactory.getDirectoryProviders(ElectricalProperties.class)[0];
+ ReaderProvider readerProvider = searchFactory.getReaderProvider();
+ IndexReader reader = readerProvider.openReader(provider);
+
+ /**
+ * Since there are so many combinations of results here, we are only going
+ * to assert a few. - J.G.
+ */
+ int x = 0;
+ TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector(x, "content");
+ assertNotNull(vector);
+ String[] terms = vector.getTerms();
+ int[] freqs = vector.getTermFrequencies();
+
+ assertEquals("electrical", terms[x]);
+ assertEquals(2, freqs[x]);
+
+ TermVectorOffsetInfo[] offsets = vector.getOffsets(x);
+ assertEquals(0, offsets[x].getStartOffset());
+ assertEquals(10, offsets[x].getEndOffset());
+
+ int[] termPositions = vector.getTermPositions(0);
+ assertEquals(0, termPositions[0]);
+ assertEquals(3, termPositions[1]);
+
+ //cleanup
+ for (Object element : s.createQuery("from " + Employee.class.getName()).list()) s.delete(element);
+ tx.commit();
+ s.close();
+ }
+
+
+ public void testNoTermVector() throws Exception {
+ FullTextSession s = Search.getFullTextSession(openSession());
+ Transaction tx = s.beginTransaction();
+
+ Employee e1 = new Employee(1000, "Griffin", "ITech");
+ s.save(e1);
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+
+ // Here's how to get a reader from a FullTextSession
+ SearchFactory searchFactory = s.getSearchFactory();
+ DirectoryProvider provider = searchFactory.getDirectoryProviders(Employee.class)[0];
+ ReaderProvider readerProvider = searchFactory.getReaderProvider();
+ IndexReader reader = readerProvider.openReader(provider);
+
+ TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector(0, "dept");
+ assertNull("should not find a term position vector", vector);
+
+ //cleanup
+ for (Object element : s.createQuery("from " + ElectricalProperties.class.getName()).list())
+ s.delete(element);
+ tx.commit();
+ s.close();
+ }
+
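+ // Helper: persists and indexes three ElectricalProperties entities used by testPositionOffsets.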
+ private void createIndex(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ ElectricalProperties e1 = new ElectricalProperties(1000, "Electrical Engineers measure Electrical Properties");
+ s.save(e1);
+ ElectricalProperties e2 = new ElectricalProperties(1001, "Electrical Properties are interesting");
+ s.save(e2);
+ ElectricalProperties e3 = new ElectricalProperties(1002, "Electrical Properties are measurable properties");
+ s.save(e3);
+
+ tx.commit();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[]{
+ ElectricalProperties.class,
+ Employee.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/TermVectorTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedDescriptionLibrary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedDescriptionLibrary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedDescriptionLibrary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+@Analyzer(impl = StandardAnalyzer.class)
+public class BoostedDescriptionLibrary {
+ private int id;
+ private String title;
+ private String author;
+ private String description;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getAuthor() {
+ return author;
+ }
+
+ public void setAuthor(String author) {
+ this.author = author;
+ }
+
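+ // The description is boosted twice: via @Boost on the property and via the boost attribute of @Field.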
+ @Boost(2.0F)
+ @Field(store = Store.YES, index = Index.TOKENIZED, boost = @Boost(2.0F))
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedDescriptionLibrary.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedFieldDescriptionLibrary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedFieldDescriptionLibrary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedFieldDescriptionLibrary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,90 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+@Analyzer(impl = StandardAnalyzer.class)
+public class BoostedFieldDescriptionLibrary {
+ private int id;
+ private String title;
+ private String author;
+ private String description;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getAuthor() {
+ return author;
+ }
+
+ public void setAuthor(String author) {
+ this.author = author;
+ }
+
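+ // boost set only via the boost attribute of @Field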
+ @Field(store = Store.YES, index = Index.TOKENIZED, boost = @Boost(2.0F))
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedFieldDescriptionLibrary.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedGetDescriptionLibrary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedGetDescriptionLibrary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedGetDescriptionLibrary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,91 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+import org.hibernate.search.annotations.Analyzer;
+import org.hibernate.search.annotations.Boost;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+@Analyzer(impl = StandardAnalyzer.class)
+public class BoostedGetDescriptionLibrary {
+ private int id;
+ private String title;
+ private String author;
+ private String description;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getAuthor() {
+ return author;
+ }
+
+ public void setAuthor(String author) {
+ this.author = author;
+ }
+
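+ // boost set only via @Boost on the getter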
+ @Boost(2.0F)
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/BoostedGetDescriptionLibrary.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomBoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomBoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomBoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,42 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * Example of a custom <code>BoostStrategy</code> implementation.
+ *
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.engine.BoostStrategy
+ */
+public class CustomBoostStrategy implements BoostStrategy {
+
+ public float defineBoost(Object value) {
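+ // applied at class level via @DynamicBoost, so the whole entity instance is passed in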
+ DynamicBoostedDescriptionLibrary indexed = ( DynamicBoostedDescriptionLibrary ) value;
+ return indexed.getDynScore();
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomBoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomFieldBoostStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomFieldBoostStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomFieldBoostStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,46 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import org.hibernate.search.engine.BoostStrategy;
+
+/**
+ * Example of a custom <code>BoostStrategy</code> implementation.
+ *
+ * @author Hardy Ferentschik
+ * @see org.hibernate.search.engine.BoostStrategy
+ */
+public class CustomFieldBoostStrategy implements BoostStrategy {
+
+ public float defineBoost(Object value) {
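+ // applied at property level via @DynamicBoost on getName(), so only the field value is passed in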
+ String name = ( String ) value;
+ if ( "foobar".equals( name ) ) {
+ return 3.0f;
+ }
+ else {
+ return 1.0f;
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/CustomFieldBoostStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostedDescriptionLibrary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostedDescriptionLibrary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostedDescriptionLibrary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,85 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.DynamicBoost;
+
+/**
+ * Test entity using the custom <code>CustomBoostStrategy</code> to set
+ * the document boost to the value of the <code>dynScore</code> field.
+ *
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+@DynamicBoost(impl = CustomBoostStrategy.class)
+public class DynamicBoostedDescriptionLibrary {
+
+ private int id;
+ private float dynScore;
+ private String name;
+
+ public DynamicBoostedDescriptionLibrary() {
+ dynScore = 1.0f;
+ }
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public float getDynScore() {
+ return dynScore;
+ }
+
+ public void setDynScore(float dynScore) {
+ this.dynScore = dynScore;
+ }
+
+ @Field(store = Store.YES)
+ @DynamicBoost(impl = CustomFieldBoostStrategy.class)
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostedDescriptionLibrary.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostingTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostingTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostingTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,132 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import java.util.List;
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.slf4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.LoggerFactory;
+
+public class DynamicBoostingTest extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ public void testDynamicBoosts() throws Exception {
+
+ Session session = openSession();
+ session.beginTransaction();
+
+ DynamicBoostedDescriptionLibrary lib1 = new DynamicBoostedDescriptionLibrary();
+ lib1.setName( "one" );
+ session.persist( lib1 );
+
+ DynamicBoostedDescriptionLibrary lib2 = new DynamicBoostedDescriptionLibrary();
+ lib2.setName( "two" );
+ session.persist( lib2 );
+
+ session.getTransaction().commit();
+ session.close();
+
+ float lib1Score = getScore( new TermQuery( new Term( "name", "one" ) ) );
+ float lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertEquals( "The scores should be equal", lib1Score, lib2Score );
+
+ // set dynamic score and reindex!
+ session = openSession();
+ session.beginTransaction();
+
+ session.refresh( lib2 );
+ lib2.setDynScore( 2.0f );
+
+ session.getTransaction().commit();
+ session.close();
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "one" ) ) );
+ lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertTrue( "lib2score should be greater than lib1score", lib1Score < lib2Score );
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "foobar" ) ) );
+ assertEquals( "lib1score should be 0 since term is not yet indexed.", 0.0f, lib1Score );
+
+ // index foobar
+ session = openSession();
+ session.beginTransaction();
+
+ session.refresh( lib1 );
+ lib1.setName( "foobar" );
+
+ session.getTransaction().commit();
+ session.close();
+
+ lib1Score = getScore( new TermQuery( new Term( "name", "foobar" ) ) );
+ lib2Score = getScore( new TermQuery( new Term( "name", "two" ) ) );
+ assertTrue( "lib1score should be greater than lib2score", lib1Score > lib2Score );
+ }
+
+ private float getScore(Query query) {
+ Session session = openSession();
+ Object[] queryResult;
+ float score;
+ try {
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
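+ // project the Lucene score and explanation instead of loading the full entity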
+ List resultList = fullTextSession
+ .createFullTextQuery( query, DynamicBoostedDescriptionLibrary.class )
+ .setProjection( ProjectionConstants.SCORE, ProjectionConstants.EXPLANATION )
+ .setMaxResults( 1 )
+ .list();
+
+ if ( resultList.size() == 0 ) {
+ score = 0.0f;
+ }
+ else {
+ queryResult = ( Object[] ) resultList.get( 0 );
+ score = ( Float ) queryResult[0];
+ String explanation = queryResult[1].toString();
+ log.debug( "score: " + score + " explanation: " + explanation );
+ }
+ }
+ finally {
+ session.close();
+ }
+ return score;
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ DynamicBoostedDescriptionLibrary.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/DynamicBoostingTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/FieldBoostTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/FieldBoostTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/FieldBoostTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,220 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.slf4j.Logger;
+
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author John Griffin
+ */
+public class FieldBoostTest extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ public void testBoostedGetDesc() throws Exception {
+ FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
+ buildBoostedGetIndex( fullTextSession );
+
+ fullTextSession.clear();
+ Transaction tx = fullTextSession.beginTransaction();
+
+ QueryParser authorParser = new QueryParser( getTargetLuceneVersion(), "author", SearchTestCase.standardAnalyzer );
+ QueryParser descParser = new QueryParser( getTargetLuceneVersion(), "description", SearchTestCase.standardAnalyzer );
+ Query author = authorParser.parse( "Wells" );
+ Query desc = descParser.parse( "martians" );
+
+ BooleanQuery query = new BooleanQuery();
+ query.add( author, BooleanClause.Occur.SHOULD );
+ query.add( desc, BooleanClause.Occur.SHOULD );
+ log.debug( query.toString() );
+
+ org.hibernate.search.FullTextQuery hibQuery =
+ fullTextSession.createFullTextQuery( query, BoostedGetDescriptionLibrary.class );
+ List results = hibQuery.list();
+
+ log.debug( hibQuery.explain( 0 ).toString() );
+ log.debug( hibQuery.explain( 1 ).toString() );
+
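+ // the boosted description match should rank the "War of the Worlds" entry first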
+ assertTrue(
+ "incorrect document returned",
+ ( ( BoostedGetDescriptionLibrary ) results.get( 0 ) ).getDescription().startsWith( "Martians" )
+ );
+
+ //cleanup
+ for ( Object element : fullTextSession.createQuery( "from " + BoostedGetDescriptionLibrary.class.getName() )
+ .list() ) {
+ fullTextSession.delete( element );
+ }
+ tx.commit();
+ fullTextSession.close();
+ }
+
+ public void testBoostedFieldDesc() throws Exception {
+ FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
+ buildBoostedFieldIndex( fullTextSession );
+
+ fullTextSession.clear();
+ Transaction tx = fullTextSession.beginTransaction();
+
+ QueryParser authorParser = new QueryParser( getTargetLuceneVersion(), "author", SearchTestCase.standardAnalyzer );
+ QueryParser descParser = new QueryParser( getTargetLuceneVersion(), "description", SearchTestCase.standardAnalyzer );
+ Query author = authorParser.parse( "Wells" );
+ Query desc = descParser.parse( "martians" );
+
+ BooleanQuery query = new BooleanQuery();
+ query.add( author, BooleanClause.Occur.SHOULD );
+ query.add( desc, BooleanClause.Occur.SHOULD );
+ log.debug( query.toString() );
+
+ org.hibernate.search.FullTextQuery hibQuery =
+ fullTextSession.createFullTextQuery( query, BoostedFieldDescriptionLibrary.class );
+ List results = hibQuery.list();
+
+ assertTrue(
+ "incorrect document boost",
+ ( ( BoostedFieldDescriptionLibrary ) results.get( 0 ) ).getDescription().startsWith( "Martians" )
+ );
+
+ log.debug( hibQuery.explain( 0 ).toString() );
+ log.debug( hibQuery.explain( 1 ).toString() );
+
+ //cleanup
+ for ( Object element : fullTextSession.createQuery( "from " + BoostedFieldDescriptionLibrary.class.getName() )
+ .list() ) {
+ fullTextSession.delete( element );
+ }
+ tx.commit();
+ fullTextSession.close();
+ }
+
+ public void testBoostedDesc() throws Exception {
+ FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
+ buildBoostedDescIndex( fullTextSession );
+
+ fullTextSession.clear();
+ Transaction tx = fullTextSession.beginTransaction();
+
+ QueryParser authorParser = new QueryParser( getTargetLuceneVersion(), "author", SearchTestCase.standardAnalyzer );
+ QueryParser descParser = new QueryParser( getTargetLuceneVersion(), "description", SearchTestCase.standardAnalyzer );
+ Query author = authorParser.parse( "Wells" );
+ Query desc = descParser.parse( "martians" );
+
+ BooleanQuery query = new BooleanQuery();
+ query.add( author, BooleanClause.Occur.SHOULD );
+ query.add( desc, BooleanClause.Occur.SHOULD );
+ log.debug( query.toString() );
+
+ org.hibernate.search.FullTextQuery hibQuery =
+ fullTextSession.createFullTextQuery( query, BoostedDescriptionLibrary.class );
+ List results = hibQuery.list();
+
+ log.debug( hibQuery.explain( 0 ).toString() );
+ log.debug( hibQuery.explain( 1 ).toString() );
+
+ assertTrue(
+ "incorrect document returned",
+ ( ( BoostedDescriptionLibrary ) results.get( 0 ) ).getDescription().startsWith( "Martians" )
+ );
+
+ //cleanup
+ for ( Object element : fullTextSession.createQuery( "from " + BoostedDescriptionLibrary.class.getName() )
+ .list() ) {
+ fullTextSession.delete( element );
+ }
+ tx.commit();
+ fullTextSession.close();
+ }
+
+ private void buildBoostedDescIndex(FullTextSession session) {
+ Transaction tx = session.beginTransaction();
+ BoostedDescriptionLibrary l = new BoostedDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "The Invisible Man" );
+ l.setDescription( "Scientist discovers invisibility and becomes insane." );
+ session.save( l );
+
+ l = new BoostedDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "War of the Worlds" );
+ l.setDescription( "Martians invade earth to eliminate mankind." );
+ session.save( l );
+
+ tx.commit();
+ }
+
+ private void buildBoostedFieldIndex(FullTextSession session) {
+ Transaction tx = session.beginTransaction();
+ BoostedFieldDescriptionLibrary l = new BoostedFieldDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "The Invisible Man" );
+ l.setDescription( "Scientist discovers invisibility and becomes insane." );
+ session.save( l );
+
+ l = new BoostedFieldDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "War of the Worlds" );
+ l.setDescription( "Martians invade earth to eliminate mankind." );
+ session.save( l );
+
+ tx.commit();
+ }
+
+ private void buildBoostedGetIndex(FullTextSession session) {
+ Transaction tx = session.beginTransaction();
+ BoostedGetDescriptionLibrary l = new BoostedGetDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "The Invisible Man" );
+ l.setDescription( "Scientist discovers invisibility and becomes insane." );
+ session.save( l );
+
+ l = new BoostedGetDescriptionLibrary();
+ l.setAuthor( "H.G. Wells" );
+ l.setTitle( "War of the Worlds" );
+ l.setDescription( "Martians invade earth to eliminate mankind." );
+ session.save( l );
+
+ tx.commit();
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ BoostedDescriptionLibrary.class,
+ BoostedFieldDescriptionLibrary.class,
+ BoostedGetDescriptionLibrary.class,
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/FieldBoostTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/Library.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/Library.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/Library.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,85 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.boost;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author John Griffin
+ */
+@Entity
+@Indexed
+public class Library {
+ private int id;
+ private String title;
+ private String author;
+ private String description;
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getAuthor() {
+ return author;
+ }
+
+ public void setAuthor(String author) {
+ this.author = author;
+ }
+
+ @Field(store = Store.YES, index = Index.TOKENIZED)
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/boost/Library.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/AbstractCar.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/AbstractCar.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/AbstractCar.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.criteria;
+
+import javax.persistence.DiscriminatorColumn;
+import javax.persistence.DiscriminatorType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Inheritance;
+import javax.persistence.InheritanceType;
+import javax.persistence.Table;
+
+import org.hibernate.search.annotations.Field;
+
+@Entity
+@Table(name = "Car")
+@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
+@DiscriminatorColumn(name = "DISC", discriminatorType = DiscriminatorType.STRING, length = 5)
+public abstract class AbstractCar {
+
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String kurztext;
+
+ private boolean hasColor = false;
+
+ protected AbstractCar() {
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getKurztext() {
+ return kurztext;
+ }
+
+ public void setKurztext(final String kurztext) {
+ this.kurztext = kurztext;
+ }
+
+ public boolean isHasColor() {
+ return hasColor;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/AbstractCar.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/Bike.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/Bike.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/Bike.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,67 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.criteria;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Field;
+
+@Entity
+public class Bike {
+
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String kurztext;
+
+ private boolean hasColor = false;
+
+ protected Bike() {
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getKurztext() {
+ return kurztext;
+ }
+
+ public void setKurztext(final String kurztext) {
+ this.kurztext = kurztext;
+ }
+
+ public boolean isHasColor() {
+ return hasColor;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/Bike.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/CombiCar.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/CombiCar.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/CombiCar.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.criteria;
+
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+
+@Entity
+@DiscriminatorValue(value = "Combi")
+@Indexed
+public class CombiCar extends AbstractCar {
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/CombiCar.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/MixedCriteriaTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/MixedCriteriaTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/MixedCriteriaTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,149 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.criteria;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Criteria;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class MixedCriteriaTest extends SearchTestCase {
+ /**
+ * HSEARCH-360
+ */
+ public void testCriteriaWithFilteredEntity() throws Exception {
+ indexTestData();
+
+ // Search
+ Session session = openSession();
+ Transaction tx = session.beginTransaction();
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
+
+ MultiFieldQueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(),
+ new String[] { "kurztext" }, SearchTestCase.standardAnalyzer
+ );
+ Query query = parser.parse( "combi OR sport" );
+
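+ // combine the full-text query with a plain Hibernate criteria restriction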
+ Criteria criteria = session.createCriteria( AbstractCar.class );
+ criteria.add( Restrictions.eq( "hasColor", Boolean.FALSE ) );
+
+ org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery( query, AbstractCar.class )
+ .setCriteriaQuery( criteria );
+ List result = hibQuery.list();
+ assertEquals( 2, result.size() );
+ tx.commit();
+ session.close();
+ }
+
+ public void testCriteriaWithoutFilteredEntity() throws Exception {
+ indexTestData();
+
+ // Search
+ Session session = openSession();
+ Transaction tx = session.beginTransaction();
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
+
+ MultiFieldQueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(),
+ new String[] { "kurztext" }, SearchTestCase.standardAnalyzer
+ );
+ Query query = parser.parse( "combi OR sport" );
+
+ Criteria criteria = session.createCriteria( AbstractCar.class );
+ criteria.add( Restrictions.eq( "hasColor", Boolean.FALSE ) );
+
+ org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery( query )
+ .setCriteriaQuery( criteria );
+ List result = hibQuery.list();
+ assertEquals( 2, result.size() );
+ tx.commit();
+ session.close();
+ }
+
+ public void testCriteriaWithMultipleEntities() throws Exception {
+ indexTestData();
+
+ // Search
+ Session session = openSession();
+ Transaction tx = session.beginTransaction();
+ FullTextSession fullTextSession = Search.getFullTextSession( session );
+
+ MultiFieldQueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(),
+ new String[] { "kurztext" }, SearchTestCase.standardAnalyzer
+ );
+ Query query = parser.parse( "combi OR sport" );
+
+ Criteria criteria = session.createCriteria( AbstractCar.class );
+ criteria.add( Restrictions.eq( "hasColor", Boolean.FALSE ) );
+
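+ // mixing a criteria query with more than one target entity type is not supported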
+ try {
+ org.hibernate.Query hibQuery = fullTextSession.createFullTextQuery( query, AbstractCar.class, Bike.class )
+ .setCriteriaQuery( criteria );
+ hibQuery.list();
+ fail();
+ }
+ catch ( SearchException se ) {
+ assertEquals( "Cannot mix criteria and multiple entity types", se.getMessage() );
+ }
+ tx.commit();
+ session.close();
+ }
+
+ private void indexTestData() {
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+
+ CombiCar combi = new CombiCar();
+ combi.setKurztext( "combi" );
+ s.persist( combi );
+
+ SportCar sport = new SportCar();
+ sport.setKurztext( "sport" );
+ s.persist( sport );
+
+ Bike bike = new Bike();
+ bike.setKurztext( "bike" );
+ s.persist( bike );
+ tx.commit();
+ s.close();
+ }
+
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ AbstractCar.class, CombiCar.class, SportCar.class, Bike.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/MixedCriteriaTest.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/SportCar.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/SportCar.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/SportCar.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.criteria;
+
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+
+@Entity
+@DiscriminatorValue(value = "Sport")
+@Indexed
+public class SportCar extends AbstractCar {
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/criteria/SportCar.java
___________________________________________________________________
Name: svn:executable
+ *
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/Dvd.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/Dvd.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/Dvd.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,76 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.explain;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Dvd {
+ @Id @GeneratedValue @DocumentId private Integer id;
+ private @Field String title;
+ private @Field String description;
+
+ protected Dvd() {
+ }
+
+ public Dvd(String title, String description) {
+ this.title = title;
+ this.description = description;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/Dvd.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/ExplanationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/ExplanationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/ExplanationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,77 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.query.explain;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.Transaction;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ExplanationTest extends SearchTestCase {
+ public void testExplanation() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ Dvd dvd = new Dvd("The dark knight", "Batman returns with his best enemy the Joker. The dark side of this movie shows up pretty quickly");
+ s.persist( dvd );
+ dvd = new Dvd("Wall-e", "The tiny little robot comes to Earth after the dark times and tries to clean it");
+ s.persist( dvd );
+ tx.commit();
+ s.clear();
+
+ tx = s.beginTransaction();
+ Map<String, Float> boosts = new HashMap<String, Float>(2);
+ boosts.put( "title", new Float(4) );
+ boosts.put( "description", new Float(1) );
+ MultiFieldQueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(), new String[] {"title", "description"},
+ SearchTestCase.standardAnalyzer, boosts );
+ Query luceneQuery = parser.parse( "dark" );
+ FullTextQuery ftQuery = s.createFullTextQuery( luceneQuery, Dvd.class )
+ .setProjection( FullTextQuery.DOCUMENT_ID, FullTextQuery.EXPLANATION, FullTextQuery.THIS );
+ @SuppressWarnings("unchecked") List<Object[]> results = ftQuery.list();
+ assertEquals( 2, results.size() );
+ for (Object[] result : results) {
+ assertEquals( ftQuery.explain( (Integer) result[0] ).toString(), result[1].toString() );
+ s.delete( result[2] );
+ }
+ tx.commit();
+ s.close();
+
+ }
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Dvd.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/query/explain/ExplanationTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Detective.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Detective.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Detective.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,85 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Detective {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.TOKENIZED)
+ private String physicalDescription;
+ @Field(index = Index.UN_TOKENIZED)
+ private String badge;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getPhysicalDescription() {
+ return physicalDescription;
+ }
+
+ public void setPhysicalDescription(String physicalDescription) {
+ this.physicalDescription = physicalDescription;
+ }
+
+ public String getBadge() {
+ return badge;
+ }
+
+ public void setBadge(String badge) {
+ this.badge = badge;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Detective.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,38 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class NotSharedReaderPerfTest extends ReaderPerfTestCase {
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.READER_STRATEGY, "not-shared" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/NotSharedReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/ReaderPerfTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/ReaderPerfTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,259 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import java.io.File;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.slf4j.Logger;
+
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public abstract class ReaderPerfTestCase extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for ( File file : files ) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Detective.class,
+ Suspect.class
+ };
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ if ( getSessions() != null ) {
+ getSessions().close();
+ }
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ public boolean insert = true;
+
+ public void testConcurrency() throws Exception {
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ for ( int index = 0; index < 5000; index++ ) {
+ Detective detective = new Detective();
+ detective.setName( "John Doe " + index );
+ detective.setBadge( "123455" + index );
+ detective.setPhysicalDescription( "Blond green eye etc etc" );
+ s.persist( detective );
+ Suspect suspect = new Suspect();
+ suspect.setName( "Jane Doe " + index );
+ suspect.setPhysicalDescription( "brunette, short, 30-ish" );
+ if ( index % 20 == 0 ) {
+ suspect.setSuspectCharge( "thief liar " );
+ }
+ else {
+ suspect.setSuspectCharge(
+ " It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery. Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud."
+ );
+ }
+ s.persist( suspect );
+ }
+ tx.commit();
+ s.close();
+
+ Thread.sleep( 1000 );
+
+ int nThreads = 15;
+ ExecutorService es = Executors.newFixedThreadPool( nThreads );
+ Work work = new Work( getSessions() );
+ ReverseWork reverseWork = new ReverseWork( getSessions() );
+ long start = System.currentTimeMillis();
+ int iteration = 100;
+ log.info( "Starting worker threads." );
+ for ( int i = 0; i < iteration; i++ ) {
+ es.execute( work );
+ es.execute( reverseWork );
+ }
+ while ( work.count < iteration - 1 ) {
+ Thread.sleep( 20 );
+ }
+ log.debug( iteration + " iterations in " + nThreads + " threads: " + ( System.currentTimeMillis() - start ) );
+ }
+
+ protected class Work implements Runnable {
+ private Random random = new Random();
+ private SessionFactory sf;
+ public volatile int count = 0;
+
+ public Work(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(),
+ new String[] { "name", "physicalDescription", "suspectCharge" },
+ SearchTestCase.standardAnalyzer
+ );
+ FullTextQuery query = getQuery( "John Doe", parser, s );
+ assertTrue( query.getResultSize() != 0 );
+
+ query = getQuery( "green", parser, s );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+
+ query = getQuery( "John Doe", parser, s );
+ assertTrue( query.getResultSize() != 0 );
+
+ query = getQuery( "thief", parser, s );
+ int firstResult = random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( firstResult );
+ query.setMaxResults( 10 );
+ List result = query.list();
+ Object object = result.get( 0 );
+ if ( insert && object instanceof Detective ) {
+ Detective detective = ( Detective ) object;
+ detective.setPhysicalDescription( detective.getPhysicalDescription() + " Eye" + firstResult );
+ }
+ else if ( insert && object instanceof Suspect ) {
+ Suspect suspect = ( Suspect ) object;
+ suspect.setPhysicalDescription( suspect.getPhysicalDescription() + " Eye" + firstResult );
+ }
+ tx.commit();
+ s.close();
+ count++;
+ }
+
+ private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
+ Query luceneQuery = null;
+ try {
+ luceneQuery = parser.parse( queryString );
+ }
+ catch ( ParseException e ) {
+ e.printStackTrace();
+ }
+ return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );
+ }
+ }
+
+ protected static class ReverseWork implements Runnable {
+ private SessionFactory sf;
+ private Random random = new Random();
+
+ public ReverseWork(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ QueryParser parser = new MultiFieldQueryParser( getTargetLuceneVersion(),
+ new String[] { "name", "physicalDescription", "suspectCharge" },
+ SearchTestCase.standardAnalyzer
+ );
+ FullTextQuery query = getQuery( "John Doe", parser, s );
+ assertTrue( query.getResultSize() != 0 );
+
+ query = getQuery( "london", parser, s );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+
+ getQuery( "John Doe", parser, s );
+ assertTrue( query.getResultSize() != 0 );
+
+ query = getQuery( "green", parser, s );
+ random.nextInt( query.getResultSize() - 15 );
+ query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
+ query.setMaxResults( 10 );
+ query.list();
+ tx.commit();
+ s.close();
+ }
+
+ private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
+ Query luceneQuery = null;
+ try {
+ luceneQuery = parser.parse( queryString );
+ }
+ catch ( ParseException e ) {
+ e.printStackTrace();
+ }
+ return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );
+ }
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/ReaderPerfTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedBufferedReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedBufferedReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedBufferedReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.reader.SharingBufferReaderProvider;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SharedBufferedReaderPerfTest extends ReaderPerfTestCase {
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.READER_STRATEGY, SharingBufferReaderProvider.class.getCanonicalName() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedBufferedReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.Environment;
+import org.hibernate.search.reader.SharedReaderProvider;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@SuppressWarnings("deprecation")
+public class SharedReaderPerfTest extends ReaderPerfTestCase {
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.READER_STRATEGY, SharedReaderProvider.class.getCanonicalName() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/SharedReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Suspect.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Suspect.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Suspect.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,87 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Column;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Suspect {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+ @Field(index = Index.TOKENIZED)
+ private String physicalDescription;
+ @Field(index = Index.TOKENIZED)
+ @Column(length = 500)
+ private String suspectCharge;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getPhysicalDescription() {
+ return physicalDescription;
+ }
+
+ public void setPhysicalDescription(String physicalDescription) {
+ this.physicalDescription = physicalDescription;
+ }
+
+ public String getSuspectCharge() {
+ return suspectCharge;
+ }
+
+ public void setSuspectCharge(String suspectCharge) {
+ this.suspectCharge = suspectCharge;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/Suspect.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/FilterOnDirectoryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/FilterOnDirectoryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/FilterOnDirectoryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,94 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.functionality;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.reader.SharingBufferReaderProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.reader.Detective;
+import org.hibernate.search.test.reader.Suspect;
+
+public class FilterOnDirectoryTest extends SearchTestCase {
+
+ public void testFilteredClasses() throws Exception {
+ createDoeFamily();
+ FullTextSession fts = Search.getFullTextSession( openSession() );
+ Transaction tx = fts.beginTransaction();
+ Query q = new TermQuery( new Term( "name", "doe" ) );
+
+ assertEquals( 2, fts.createFullTextQuery( q ).getResultSize() );
+ assertEquals( 2, fts.createFullTextQuery( q, Detective.class, Suspect.class ).getResultSize() );
+
+ FullTextQuery detectiveQuery = fts.createFullTextQuery( q, Detective.class );
+ assertEquals( 1, detectiveQuery.getResultSize() );
+ assertTrue( detectiveQuery.list().get(0) instanceof Detective );
+
+ FullTextQuery suspectQuery = fts.createFullTextQuery( q, Suspect.class );
+ assertEquals( 1, suspectQuery.getResultSize() );
+ assertTrue( suspectQuery.list().get(0) instanceof Suspect );
+
+ assertEquals( 2, fts.createFullTextQuery( q ).getResultSize() );
+ assertEquals( 2, fts.createFullTextQuery( q, Detective.class, Suspect.class ).getResultSize() );
+
+ tx.commit();
+ fts.close();
+ }
+
+ private void createDoeFamily() {
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+ Detective detective = new Detective();
+ detective.setName( "John Doe" );
+ s.persist( detective );
+ Suspect suspect = new Suspect();
+ suspect.setName( "Jane Doe" );
+ s.persist( suspect );
+ tx.commit();
+ s.close();
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StandardAnalyzer.class.getName() );
+ cfg.setProperty( Environment.READER_STRATEGY, SharingBufferReaderProvider.class.getName() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Detective.class,
+ Suspect.class
+ };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/FilterOnDirectoryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/SharingBufferIndexProviderTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/SharingBufferIndexProviderTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/SharingBufferIndexProviderTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,128 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.functionality;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.index.IndexReader;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.reader.functionality.TestableSharingBufferReaderProvider.MockIndexReader;
+import org.hibernate.search.test.reader.functionality.TestableSharingBufferReaderProvider.TestManipulatorPerDP;
+
+import junit.framework.TestCase;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class SharingBufferIndexProviderTest extends TestCase {
+
+ private final TestableSharingBufferReaderProvider readerProvider = new TestableSharingBufferReaderProvider();
+ private final CountDownLatch startSignal = new CountDownLatch(1);
+ private final Runnable searchTask = new SearchTask();
+ private final Runnable changeTask = new ChangeTask();
+ private final AtomicInteger countDoneSearches = new AtomicInteger();
+ private final AtomicInteger countDoneIndexmods = new AtomicInteger();
+ private static final int SEARCHES_NUM = 50000;
+ private static final Random random = new Random();
+
+ public void testStressingMock() throws InterruptedException {
+ readerProvider.initialize(null, null);
+ ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool( 200 );//much chaos
+ for ( int i = 0; i < SEARCHES_NUM; i++ ) {
+ executor.execute( makeTask( i ) );
+ }
+ executor.shutdown();
+ startSignal.countDown();
+ executor.awaitTermination( 500, TimeUnit.SECONDS );
+ assertTrue( "memory leak: holding a reference to some unused IndexReader", readerProvider.areAllOldReferencesGone() );
+ for ( MockIndexReader reader : readerProvider.getCreatedIndexReaders() ) {
+ if ( readerProvider.isReaderCurrent( reader ) ) {
+ assertTrue( "the most current reader should be open", ! reader.isClosed() );
+ }
+ else {
+ assertTrue( "an IndexReader is still open", reader.isClosed() );
+ }
+ }
+ assertEquals( SEARCHES_NUM, countDoneSearches.get() );
+ assertEquals( SEARCHES_NUM/10, countDoneIndexmods.get() );
+ }
+
+ private Runnable makeTask(int i) {
+ if ( i % 10 == 0) {
+ return changeTask;
+ }
+ else {
+ return searchTask;
+ }
+ }
+
+ private DirectoryProvider[] getRandomAvailableDPs() {
+ int arraySize = random.nextInt( readerProvider.manipulators.size() - 1 ) + 1;
+ DirectoryProvider[] array = new DirectoryProvider[arraySize];
+ List<DirectoryProvider> availableDPs = new ArrayList<DirectoryProvider>( readerProvider.directoryProviders );
+ for (int i=0; i<arraySize; i++){
+ int chosenDpIndex = random.nextInt( availableDPs.size() );
+ array[i] = availableDPs.get( chosenDpIndex );
+ availableDPs.remove( array[i] );
+ }
+ return array;
+ }
+
+ private class SearchTask implements Runnable {
+ public void run() {
+ try {
+ startSignal.await();
+ } catch (InterruptedException e) {
+ //manage termination:
+ return;
+ }
+ IndexReader fakeOpenReader = readerProvider.openReader( getRandomAvailableDPs() );
+ Thread.yield();
+ readerProvider.closeReader( fakeOpenReader );
+ countDoneSearches.incrementAndGet();
+ }
+ }
+
+ private class ChangeTask extends SearchTask {
+ public void run() {
+ super.run();
+ Thread.yield();
+ DirectoryProvider[] randomAvailableDPs = getRandomAvailableDPs();
+ for ( DirectoryProvider dp : randomAvailableDPs ) {
+ TestManipulatorPerDP testManipulatorPerDP = readerProvider.manipulators.get( dp.getDirectory() );
+ testManipulatorPerDP.setIndexChanged();
+ }
+ countDoneIndexmods.incrementAndGet();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/SharingBufferIndexProviderTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/TestableSharingBufferReaderProvider.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/TestableSharingBufferReaderProvider.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/TestableSharingBufferReaderProvider.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,303 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.functionality;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Vector;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.index.TermFreqVector;
+import org.apache.lucene.index.TermPositions;
+import org.apache.lucene.index.TermVectorMapper;
+import org.apache.lucene.store.Directory;
+
+import org.hibernate.search.SearchException;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.reader.ReaderProviderHelper;
+import org.hibernate.search.reader.SharingBufferReaderProvider;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.RAMDirectoryProvider;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class TestableSharingBufferReaderProvider extends SharingBufferReaderProvider {
+
+ private static final int NUM_DIRECTORY_PROVIDERS = 4;
+ private final Vector<MockIndexReader> createdReadersHistory = new Vector<MockIndexReader>( 500 );
+ final Map<Directory, TestManipulatorPerDP> manipulators = new ConcurrentHashMap<Directory, TestManipulatorPerDP>();
+ final List<DirectoryProvider> directoryProviders = Collections.synchronizedList(new ArrayList<DirectoryProvider>());
+
+ public TestableSharingBufferReaderProvider() {
+ for ( int i = 0; i < NUM_DIRECTORY_PROVIDERS; i++ ) {
+ TestManipulatorPerDP tm = new TestManipulatorPerDP( i );
+ manipulators.put( tm.dp.getDirectory(), tm );
+ directoryProviders.add( tm.dp );
+ }
+ }
+
+ public static class TestManipulatorPerDP {
+ private final AtomicBoolean isIndexReaderCurrent = new AtomicBoolean( false );//set to true by the MockIndexReader constructor
+ private final AtomicBoolean isReaderCreated = new AtomicBoolean( false );
+ private final DirectoryProvider dp = new RAMDirectoryProvider();
+
+ public TestManipulatorPerDP(int seed) {
+ dp.initialize( "dp" + seed, new Properties(), null );
+ dp.start();
+ }
+
+ public void setIndexChanged() {
+ isIndexReaderCurrent.set( false );
+ }
+
+ }
+
+ public boolean isReaderCurrent(MockIndexReader reader) {
+ //avoid usage of allReaders or test would be useless
+ for ( PerDirectoryLatestReader latest : currentReaders.values() ) {
+ IndexReader latestReader = latest.current.reader;
+ if ( latestReader == reader ) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected IndexReader readerFactory(Directory directory) {
+ TestManipulatorPerDP manipulatorPerDP = manipulators.get( directory );
+ if ( !manipulatorPerDP.isReaderCreated.compareAndSet( false, true ) ) {
+ throw new IllegalStateException( "IndexReader1 created twice" );
+ }
+ else {
+ return new MockIndexReader( manipulatorPerDP.isIndexReaderCurrent );
+ }
+ }
+
+ @Override
+ public void initialize(Properties props, SearchFactoryImplementor searchFactoryImplementor) {
+ try {
+ for ( Directory directory : manipulators.keySet() ) {
+ currentReaders.put( directory, new PerDirectoryLatestReader( directory ) );
+ }
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Unable to open Lucene IndexReader", e );
+ }
+ }
+
+ public boolean areAllOldReferencesGone() {
+ int numReferencesReaders = allReaders.size();
+ int numExpectedActiveReaders = manipulators.size();
+ return numReferencesReaders == numExpectedActiveReaders;
+ }
+
+ public List<MockIndexReader> getCreatedIndexReaders() {
+ return createdReadersHistory;
+ }
+
+ public MockIndexReader getCurrentMockReaderPerDP(DirectoryProvider dp) {
+ IndexReader[] indexReaders = ReaderProviderHelper.getSubReadersFromMultiReader(
+ ( MultiReader ) super.openReader(
+ new DirectoryProvider[] { dp }
+ )
+ );
+ if ( indexReaders.length != 1 ) {
+ throw new IllegalStateException( "Expecting one reader" );
+ }
+ return ( MockIndexReader ) indexReaders[0];
+ }
+
+ public class MockIndexReader extends IndexReader {
+
+ private final AtomicBoolean closed = new AtomicBoolean( false );
+ private final AtomicBoolean hasAlreadyBeenReOpened = new AtomicBoolean( false );
+ private final AtomicBoolean isIndexReaderCurrent;
+
+ MockIndexReader(AtomicBoolean isIndexReaderCurrent) {
+ this.isIndexReaderCurrent = isIndexReaderCurrent;
+ if ( !isIndexReaderCurrent.compareAndSet( false, true ) ) {
+ throw new IllegalStateException( "Unnecessarily reopened" );
+ }
+ createdReadersHistory.add( this );
+ }
+
+ public final boolean isClosed() {
+ return closed.get();
+ }
+
+ @Override
+ protected void doClose() throws IOException {
+ boolean okToClose = closed.compareAndSet( false, true );
+ if ( !okToClose ) {
+ throw new IllegalStateException( "Attempt to close a closed IndexReader" );
+ }
+ if ( !hasAlreadyBeenReOpened.get() ) {
+ throw new IllegalStateException( "Attempt to close the most current IndexReader" );
+ }
+ }
+
+ @Override
+ public synchronized IndexReader reopen() {
+ if ( isIndexReaderCurrent.get() ) {
+ return this;
+ }
+ else {
+ if ( hasAlreadyBeenReOpened.compareAndSet( false, true ) ) {
+ return new MockIndexReader( isIndexReaderCurrent );
+ }
+ else {
+ throw new IllegalStateException( "Attempt to reopen an old IndexReader more than once" );
+ }
+ }
+ }
+
+ @Override
+ protected void doDelete(int docNum) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected void doSetNorm(int doc, String field, byte value) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected void doUndeleteAll() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public int docFreq(Term t) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Document document(int n, FieldSelector fieldSelector) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Collection getFieldNames(FieldOption fldOption) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermFreqVector getTermFreqVector(int docNumber, String field) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void getTermFreqVector(int docNumber, String field, TermVectorMapper mapper) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void getTermFreqVector(int docNumber, TermVectorMapper mapper) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermFreqVector[] getTermFreqVectors(int docNumber) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean hasDeletions() {
+ return false;//just something to make MultiReader constructor happy
+ }
+
+ @Override
+ public boolean isDeleted(int n) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public int maxDoc() {
+ return 10;//just something to make MultiReader constructor happy
+ }
+
+ @Override
+ public byte[] norms(String field) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void norms(String field, byte[] bytes, int offset) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public int numDocs() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermDocs termDocs() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermPositions termPositions() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermEnum terms() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TermEnum terms(Term t) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+// @Override not defined in Lucene 2.9, added in 3.0
+ protected void doCommit(Map<String, String> commitUserData) {
+ throw new UnsupportedOperationException();
+ }
+
+// @Override not defined in Lucene 3.0, existed before
+ protected void doCommit() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/functionality/TestableSharingBufferReaderProvider.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/AbstractActivity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/AbstractActivity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/AbstractActivity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,104 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Sanne Grinovero
+ */
+public abstract class AbstractActivity implements Runnable {
+
+ private final ThreadLocal<QueryParser> parsers = new ThreadLocal<QueryParser>(){
+ @Override
+ protected QueryParser initialValue(){
+ return new MultiFieldQueryParser( SearchTestCase.getTargetLuceneVersion(),
+ new String[] {"name", "physicalDescription", "suspectCharge"},
+ SearchTestCase.standardAnalyzer );
+ }
+ };
+
+ private final SessionFactory sf;
+ private final AtomicInteger jobSeed = new AtomicInteger();
+ private final CountDownLatch startSignal;
+
+ AbstractActivity(SessionFactory sf, CountDownLatch startSignal) {
+ this.startSignal = startSignal;
+ this.sf = sf;
+ }
+
+ public final void run() {
+ try {
+ startSignal.await();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ return;
+ }
+ Session s = sf.openSession();
+ try {
+ FullTextSession fts = Search.getFullTextSession( s );
+ Transaction tx = s.beginTransaction();
+ boolean ok = false;
+ try {
+ doAction( fts, jobSeed.getAndIncrement() );
+ ok = true;
+ } finally {
+ if (ok)
+ tx.commit();
+ else
+ tx.rollback();
+ }
+ } finally {
+ s.close();
+ }
+ }
+
+ protected FullTextQuery getQuery(String queryString, FullTextSession s, Class... classes) {
+ Query luceneQuery = null;
+ try {
+ luceneQuery = parsers.get().parse(queryString);
+ }
+ catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return s.createFullTextQuery( luceneQuery, classes );
+ }
+
+ protected abstract void doAction(FullTextSession s, int jobSeed);
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/AbstractActivity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/BufferSharingReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/BufferSharingReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/BufferSharingReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,39 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import org.hibernate.search.reader.SharingBufferReaderProvider;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class BufferSharingReaderPerfTest extends ReaderPerformance {
+
+ @Override
+ protected String getReaderStrategyName() {
+ return SharingBufferReaderProvider.class.getName();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/BufferSharingReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/IndexFillRunnable.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/IndexFillRunnable.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/IndexFillRunnable.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,67 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.index.IndexWriter;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class IndexFillRunnable implements Runnable {
+
+ private volatile int jobSeed = 0;
+ private final IndexWriter iw;
+
+ public IndexFillRunnable(IndexWriter iw) {
+ super();
+ this.iw = iw;
+ }
+
+ public void run() {
+ Field f1 = new Field( "name", "Some One " + jobSeed++, Store.NO, Index.ANALYZED );
+ Field f2 = new Field(
+ "physicalDescription",
+ " just more people sitting around and filling my index... ",
+ Store.NO,
+ Index.ANALYZED
+ );
+ Document d = new Document();
+ d.add( f1 );
+ d.add( f2 );
+ try {
+ iw.addDocument( d );
+ }
+ catch ( IOException e ) {
+ e.printStackTrace();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/IndexFillRunnable.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/InsertActivity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/InsertActivity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/InsertActivity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,62 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.util.concurrent.CountDownLatch;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.test.reader.Detective;
+import org.hibernate.search.test.reader.Suspect;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class InsertActivity extends AbstractActivity {
+
+ InsertActivity(SessionFactory sf, CountDownLatch startSignal) {
+ super(sf, startSignal);
+ }
+
+ @Override
+ protected void doAction(FullTextSession s, int jobSeed) {
+ Detective detective = new Detective();
+ detective.setName("John Doe " + jobSeed);
+ detective.setBadge("123455" + jobSeed);
+ detective.setPhysicalDescription("Blond green eye etc etc");
+ s.persist(detective);
+ Suspect suspect = new Suspect();
+ suspect.setName("Jane Doe " + jobSeed);
+ suspect.setPhysicalDescription("brunette, short, 30-ish");
+ if (jobSeed % 20 == 0) {
+ suspect.setSuspectCharge("thief liar ");
+ } else {
+ suspect.setSuspectCharge(" It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery. Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud.");
+ }
+ s.persist(suspect);
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/InsertActivity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/NotSharedReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/NotSharedReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/NotSharedReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class NotSharedReaderPerfTest extends ReaderPerformance {
+
+ @Override
+ protected String getReaderStrategyName() {
+ return "not-shared";
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/NotSharedReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/ReaderPerformance.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/ReaderPerformance.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/ReaderPerformance.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,150 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.hibernate.search.Environment;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.reader.Detective;
+import org.hibernate.search.test.reader.Suspect;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * To enable performance tests: de-comment buildBigIndex(); in setUp() and rename no_testPerformance
+ * @author Sanne Grinovero
+ */
+public abstract class ReaderPerformance extends SearchTestCase {
+
+ //more iterations for more reliable measures:
+ private static final int TOTAL_WORK_BATCHES = 10;
+ //the next 3 define the kind of workload mix to test on:
+ private static final int SEARCHERS_PER_BATCH = 10;
+ private static final int UPDATES_PER_BATCH = 2;
+ private static final int INSERTIONS_PER_BATCH = 1;
+
+ private static final int WORKER_THREADS = 20;
+
+ private static final int WARMUP_CYCLES = 6;
+
+ protected void setUp() throws Exception {
+ File baseIndexDir = getBaseIndexDir();
+ baseIndexDir.mkdir();
+ File[] files = baseIndexDir.listFiles();
+ for ( File file : files ) {
+ FileHelper.delete( file );
+ }
+ super.setUp();
+ }
+
+ private void buildBigIndex() throws InterruptedException, CorruptIndexException, LockObtainFailedException, IOException {
+ System.out.println( "Going to create fake index..." );
+ FSDirectory directory = FSDirectory.open(new File(getBaseIndexDir(), Detective.class.getCanonicalName()));
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( directory, new SimpleAnalyzer(), true, fieldLength );
+ IndexFillRunnable filler = new IndexFillRunnable( iw );
+ ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool( WORKER_THREADS );
+ for (int batch=0; batch<=5000000; batch++){
+ executor.execute( filler );
+ }
+ executor.shutdown();
+ executor.awaitTermination( 600, TimeUnit.SECONDS );
+ iw.commit();
+ iw.optimize();
+ iw.close();
+ System.out.println( "Index created." );
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Detective.class,
+ Suspect.class
+ };
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ FileHelper.delete( getBaseIndexDir() );
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( "hibernate.search.default.indexBase", getBaseIndexDir().getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.default.optimizer.transaction_limit.max", "10" ); // workaround too many open files
+ cfg.setProperty( "hibernate.search.default." + Environment.EXCLUSIVE_INDEX_USE, "true" );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( Environment.READER_STRATEGY, getReaderStrategyName() );
+ }
+
+ protected abstract String getReaderStrategyName();
+
+ public final void testPerformance() throws InterruptedException, CorruptIndexException, LockObtainFailedException, IOException {
+ buildBigIndex();
+ for (int i=0; i<WARMUP_CYCLES; i++) {
+ timeMs();
+ }
+ }
+
+ private final void timeMs() throws InterruptedException {
+ ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool( WORKER_THREADS );
+ CountDownLatch startSignal = new CountDownLatch(1);
+ InsertActivity insertionTask = new InsertActivity( getSessions(), startSignal );
+ SearchActivity searchTask = new SearchActivity( getSessions(), startSignal );
+ UpdateActivity updateTask = new UpdateActivity( getSessions(), startSignal );
+ //we declare the needed activities in order; the scheduler will "mix" them:
+ for (int batch=0; batch<=TOTAL_WORK_BATCHES; batch++){
+ for ( int inserters=0; inserters<INSERTIONS_PER_BATCH; inserters++)
+ executor.execute( insertionTask );
+ for ( int searchers=0; searchers<SEARCHERS_PER_BATCH; searchers++)
+ executor.execute( searchTask );
+ for ( int updaters=0; updaters<UPDATES_PER_BATCH; updaters++)
+ executor.execute( updateTask );
+ }
+ executor.shutdown();
+ long startTime = System.currentTimeMillis();
+ startSignal.countDown();//start!
+ executor.awaitTermination( 600, TimeUnit.SECONDS );
+ long endTime = System.currentTimeMillis();
+ System.out.println( "Performance test for " + getReaderStrategyName() + ": " + (endTime - startTime) +"ms. (" +
+ (TOTAL_WORK_BATCHES*SEARCHERS_PER_BATCH) + " searches, " +
+ (TOTAL_WORK_BATCHES*INSERTIONS_PER_BATCH) + " insertions, " +
+ (TOTAL_WORK_BATCHES*UPDATES_PER_BATCH) + " updates)" );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/ReaderPerformance.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SearchActivity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SearchActivity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SearchActivity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.util.concurrent.CountDownLatch;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.test.reader.Detective;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Sanne Grinovero
+ */
+public class SearchActivity extends AbstractActivity {
+
+ SearchActivity(SessionFactory sf, CountDownLatch startSignal) {
+ super(sf, startSignal);
+ }
+
+ @Override
+ protected void doAction(FullTextSession s, int jobSeed) {
+ FullTextQuery q = getQuery( "John Doe", s, Detective.class);
+ q.setMaxResults( 10 );
+ q.getResultSize();
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SearchActivity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SharedReaderPerfTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SharedReaderPerfTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SharedReaderPerfTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,37 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class SharedReaderPerfTest extends ReaderPerformance {
+
+ @Override
+ protected String getReaderStrategyName() {
+ return "shared";
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/SharedReaderPerfTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/UpdateActivity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/UpdateActivity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/UpdateActivity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.reader.performance;
+
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.test.reader.Detective;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class UpdateActivity extends AbstractActivity {
+
+ UpdateActivity(SessionFactory sf, CountDownLatch startSignal) {
+ super(sf, startSignal);
+ }
+
+ @Override
+ protected void doAction(FullTextSession s, int jobSeed) {
+ FullTextQuery q = getQuery( "John", s, Detective.class );
+ List list = q.setMaxResults( 1 ).list();
+ for ( Object o : list){
+ Detective detective = (Detective) o;
+ detective.setPhysicalDescription( "old" );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/reader/performance/UpdateActivity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Categorie.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Categorie.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Categorie.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,79 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Indexed
+@Entity
+public class Categorie {
+
+ @DocumentId
+ @Id @GeneratedValue
+ private Integer id;
+
+ @Field( index = Index.TOKENIZED, store = Store.YES )
+ private String nom;
+
+ public Categorie() {
+ }
+
+ public Categorie(String nom) {
+ this.nom = nom;
+ }
+
+ public String toString() {
+ return ( nom );
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getNom() {
+ return nom;
+ }
+
+ public void setNom(String nom) {
+ this.nom = nom;
+ }
+}
+
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Categorie.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/DelegationWrapper.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/DelegationWrapper.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/DelegationWrapper.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,57 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationHandler;
+import java.io.Serializable;
+
+import org.hibernate.Session;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DelegationWrapper implements InvocationHandler, Serializable {
+ Object realSession;
+
+ public DelegationWrapper(Session session) {
+ this.realSession = session;
+ }
+
+ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+ try {
+ return method.invoke( realSession, args );
+ }
+ catch (InvocationTargetException e) {
+ if ( e.getTargetException() instanceof RuntimeException ) {
+ throw (RuntimeException) e.getTargetException();
+ }
+ else {
+ throw e;
+ }
+ }
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/DelegationWrapper.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Domain.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Domain.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Domain.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,68 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Domain {
+ @Id
+ @DocumentId
+ private Integer id;
+ @Field
+ private String name;
+
+ public Domain(){ }
+
+ public Domain(Integer id, String name) {
+ this.id = id;
+ this.name = name;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Domain.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Email.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Email.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Email.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,98 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.ManyToOne;
+import javax.persistence.FetchType;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Email {
+ @Id
+ @DocumentId
+ private Long id;
+
+ @Field(index = Index.TOKENIZED)
+ private String title;
+
+ @Field(index = Index.TOKENIZED)
+ private String body;
+
+ private String header;
+
+ @IndexedEmbedded @ManyToOne(fetch = FetchType.LAZY)
+ private Domain domain;
+
+ public Domain getDomain() {
+ return domain;
+ }
+
+ public void setDomain(Domain domain) {
+ this.domain = domain;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+ public String getBody() {
+ return body;
+ }
+
+ public void setBody(String body) {
+ this.body = body;
+ }
+
+ public String getHeader() {
+ return header;
+ }
+
+ public void setHeader(String header) {
+ this.header = header;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Email.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Entite.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Entite.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Entite.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.ManyToOne;
+import javax.persistence.FetchType;
+
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Indexed
+@Entity
+public class Entite {
+ @DocumentId
+ @Id
+ @GeneratedValue
+ private Integer id;
+
+ @Field( index = Index.TOKENIZED, store = Store.YES )
+ private String titre;
+
+ @IndexedEmbedded
+ @ManyToOne(fetch = FetchType.LAZY)
+ private Categorie categorie;
+
+ public Entite() {
+ }
+
+ public Entite(String titre, Categorie categorie) {
+ this.titre = titre;
+ this.categorie = categorie;
+ }
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getTitre() {
+ return titre;
+ }
+
+ public void setTitre(String titre) {
+ this.titre = titre;
+ }
+
+ public Categorie getCategorie() {
+ return categorie;
+ }
+
+ public void setCategorie(Categorie categorie) {
+ this.categorie = categorie;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/Entite.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,228 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import java.sql.Statement;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Iterator;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.hibernate.ScrollMode;
+import org.hibernate.ScrollableResults;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MassIndexTest extends SearchTestCase {
+
+ public void testBatchSize() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ int loop = 14;
+ for (int i = 0; i < loop; i++) {
+ Statement statmt = s.connection().createStatement();
+ statmt.executeUpdate( "insert into Domain(id, name) values( + "
+ + ( i + 1 ) + ", 'sponge" + i + "')" );
+ statmt.executeUpdate( "insert into Email(id, title, body, header, domain_id) values( + "
+ + ( i + 1 ) + ", 'Bob Sponge', 'Meet the guys who create the software', 'nope', " + ( i + 1 ) +")" );
+ statmt.close();
+ }
+ tx.commit();
+ s.close();
+
+ //check that the objects created via plain SQL (and thus never indexed) do get found after manual indexing
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ ScrollableResults results = s.createCriteria( Email.class ).scroll( ScrollMode.FORWARD_ONLY );
+ int index = 0;
+ while ( results.next() ) {
+ index++;
+ s.index( results.get( 0 ) );
+ if ( index % 5 == 0 ) s.clear();
+ }
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ List result = s.createFullTextQuery( parser.parse( "body:create" ) ).list();
+ assertEquals( 14, result.size() );
+ for (Object object : result) {
+ s.delete( object );
+ }
+ tx.commit();
+ s.close();
+ }
+
+
+ public void testTransactional() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ int loop = 4;
+ for (int i = 0; i < loop; i++) {
+ Email email = new Email();
+ email.setId( (long) i + 1 );
+ email.setTitle( "JBoss World Berlin" );
+ email.setBody( "Meet the guys who wrote the software" );
+ s.persist( email );
+ }
+ tx.commit();
+ s.close();
+
+ //check that content which has not been created yet is not found
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ List result = s.createFullTextQuery( parser.parse( "body:create" ) ).list();
+ assertEquals( 0, result.size() );
+ tx.commit();
+ s.close();
+
+ s = new FullTextSessionImpl( openSession() );
+ s.getTransaction().begin();
+ Statement stmt = s.connection().createStatement();
+ stmt.executeUpdate( "update Email set body='Meet the guys who write the software'" );
+ stmt.close();
+ //insert an object never indexed
+ stmt = s.connection().createStatement();
+ stmt.executeUpdate( "insert into Email(id, title, body, header) values( + "
+ + ( loop + 1 ) + ", 'Bob Sponge', 'Meet the guys who create the software', 'nope')" );
+ stmt.close();
+ s.getTransaction().commit();
+ s.close();
+
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ result = s.createFullTextQuery( parser.parse( "body:write" ) ).list();
+ assertEquals( 0, result.size() );
+ result = s.createCriteria( Email.class ).list();
+ for (int i = 0; i < loop / 2; i++)
+ s.index( result.get( i ) );
+ tx.commit(); //do the process
+ s.index( result.get( loop / 2 ) ); //do the process out of tx
+ tx = s.beginTransaction();
+ for (int i = loop / 2 + 1; i < loop; i++)
+ s.index( result.get( i ) );
+ tx.commit(); //do the process
+ s.close();
+
+ s = Search.getFullTextSession( openSession() );
+ tx = s.beginTransaction();
+ //object never indexed
+ Email email = (Email) s.get( Email.class, Long.valueOf( loop + 1 ) );
+ s.index( email );
+ tx.commit();
+ s.close();
+
+ //check that the previously non-indexed object gets found after s.index()
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ result = s.createFullTextQuery( parser.parse( "body:create" ) ).list();
+ assertEquals( 1, result.size() );
+ tx.commit();
+ s.close();
+ }
+
+ public void testLazyLoading() throws Exception {
+ Categorie cat = new Categorie( "Livre" );
+ Entite ent = new Entite( "Le temple des songes", cat );
+ Session s = openSession();
+ Transaction tx = s.beginTransaction();
+ s.persist( cat );
+ s.persist( ent );
+ tx.commit();
+ s.close();
+
+ s = getSessionWithAutoCommit();
+ FullTextSession session = Search.getFullTextSession( s );
+ Query luceneQuery = new TermQuery( new Term( "categorie.nom", "livre" ) );
+ List result = session.createFullTextQuery( luceneQuery, Entite.class ).list();
+ assertEquals( 1, result.size() );
+ s.close();
+
+ s = getSessionWithAutoCommit();
+ ent = (Entite) s.get( Entite.class, ent.getId() );
+ session = Search.getFullTextSession( s );
+ session.index( ent );
+ s.close();
+
+ s = getSessionWithAutoCommit();
+ session = Search.getFullTextSession( s );
+ luceneQuery = new TermQuery( new Term( "categorie.nom", "livre" ) );
+ result = session.createFullTextQuery( luceneQuery, Entite.class ).list();
+ assertEquals( "test lazy loading and indexing", 1, result.size() );
+ s.close();
+
+ s = getSessionWithAutoCommit();
+ Iterator it = s.createQuery( "from Entite where id = :id").setParameter( "id", ent.getId() ).iterate();
+ session = Search.getFullTextSession( s );
+ while ( it.hasNext() ) {
+ ent = (Entite) it.next();
+ session.index( ent );
+ }
+ s.close();
+
+ s = getSessionWithAutoCommit();
+ session = Search.getFullTextSession( s );
+ luceneQuery = new TermQuery( new Term( "categorie.nom", "livre" ) );
+ result = session.createFullTextQuery( luceneQuery, Entite.class ).list();
+ assertEquals( "test lazy loading and indexing", 1, result.size() );
+ s.close();
+ }
+
+ private Session getSessionWithAutoCommit() throws SQLException {
+ Session s;
+ s = openSession();
+ s.connection().setAutoCommit( true );
+ return s;
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.worker.batch_size", "5" );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Email.class,
+ Entite.class,
+ Categorie.class,
+ Domain.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexUsingManualFlushTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexUsingManualFlushTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexUsingManualFlushTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,98 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import java.sql.Statement;
+import java.util.List;
+
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.Environment;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.Transaction;
+import org.hibernate.ScrollableResults;
+import org.hibernate.ScrollMode;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class MassIndexUsingManualFlushTest extends SearchTestCase {
+ public void testManualIndexFlush() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ int loop = 14;
+ for (int i = 0; i < loop; i++) {
+ Statement statmt = s.connection().createStatement();
+ statmt.executeUpdate( "insert into Domain(id, name) values( + "
+ + ( i + 1 ) + ", 'sponge" + i + "')" );
+ statmt.executeUpdate( "insert into Email(id, title, body, header, domain_id) values( + "
+ + ( i + 1 ) + ", 'Bob Sponge', 'Meet the guys who create the software', 'nope', " + ( i + 1 ) +")" );
+ statmt.close();
+ }
+ tx.commit();
+ s.close();
+
+ //check that the objects created via plain SQL (and thus never indexed) do get found after manual indexing
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ ScrollableResults results = s.createCriteria( Email.class ).scroll( ScrollMode.FORWARD_ONLY );
+ int index = 0;
+ while ( results.next() ) {
+ index++;
+ final Email o = (Email) results.get( 0 );
+ s.index( o );
+ if ( index % 5 == 0 ) {
+ s.flushToIndexes();
+ s.clear();
+ }
+ }
+ tx.commit();
+ s.clear();
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ List result = s.createFullTextQuery( parser.parse( "body:create" ) ).list();
+ assertEquals( 14, result.size() );
+ for (Object object : result) {
+ s.delete( object );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Email.class,
+ Domain.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/MassIndexUsingManualFlushTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/OptimizeTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/OptimizeTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/OptimizeTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,110 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import java.io.File;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class OptimizeTest extends SearchTestCase {
+
+ public void testOptimize() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+ int loop = 2000;
+ for (int i = 0; i < loop; i++) {
+ Email email = new Email();
+ email.setId( (long) i + 1 );
+ email.setTitle( "JBoss World Berlin" );
+ email.setBody( "Meet the guys who wrote the software" );
+ s.persist( email );
+ }
+ tx.commit();
+ s.close();
+
+ s = Search.getFullTextSession( openSession() );
+ tx = s.beginTransaction();
+ s.getSearchFactory().optimize( Email.class );
+ tx.commit();
+ s.close();
+
+ //check that the indexed objects are still found after the optimize() call
+ s = new FullTextSessionImpl( openSession() );
+ tx = s.beginTransaction();
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ int result = s.createFullTextQuery( parser.parse( "body:wrote" ) ).getResultSize();
+ assertEquals( 2000, result );
+ s.createQuery( "delete " + Email.class.getName() ).executeUpdate();
+ tx.commit();
+ s.close();
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for (File file : files) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Email.class,
+ Domain.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/OptimizeTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/SessionTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/SessionTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/SessionTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,109 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.session;
+
+import java.lang.reflect.Proxy;
+
+import org.hibernate.Criteria;
+import org.hibernate.Session;
+import org.hibernate.context.ThreadLocalSessionContext;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.criterion.DetachedCriteria;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SessionTest extends SearchTestCase {
+
+ private static final Class[] SESS_PROXY_INTERFACES = new Class[] {
+ org.hibernate.classic.Session.class,
+ org.hibernate.engine.SessionImplementor.class,
+ org.hibernate.jdbc.JDBCContext.Context.class,
+ org.hibernate.event.EventSource.class
+ };
+
+ public void testSessionWrapper() throws Exception {
+ Session s = openSession();
+ DelegationWrapper wrapper = new DelegationWrapper( s );
+ Session wrapped = ( Session ) Proxy.newProxyInstance(
+ org.hibernate.classic.Session.class.getClassLoader(),
+ SESS_PROXY_INTERFACES,
+ wrapper
+ );
+ try {
+ Search.getFullTextSession( wrapped );
+ }
+ catch ( ClassCastException e ) {
+ e.printStackTrace();
+ fail( e.toString() );
+ }
+ wrapped.close();
+ }
+
+ public void testDetachedCriteria() throws Exception {
+ FullTextSession s = Search.getFullTextSession( openSession() );
+ DetachedCriteria dc = DetachedCriteria.forClass( Email.class );
+ try {
+ Criteria c = dc.getExecutableCriteria( s ).setMaxResults( 10 );
+ c.list();
+ }
+ catch ( ClassCastException e ) {
+ e.printStackTrace();
+ fail( e.toString() );
+ }
+ s.close();
+ }
+
+ public void testThreadBoundSessionWrappingOutOfTransaction() throws Exception {
+ final Session session = getSessions().getCurrentSession();
+ try {
+ FullTextSession fts = Search.getFullTextSession( session );
+ //success
+ }
+ finally {
+ //clean up after the mess
+ ThreadLocalSessionContext.unbind( getSessions() );
+ }
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Email.class,
+ Domain.class
+ };
+ }
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ // for this test we explicitly set the auto-commit mode since we are not explicitly starting a transaction
+ // which could be a problem in some databases.
+ cfg.setProperty( "hibernate.connection.autocommit", "true" );
+ //needed for testThreadBoundSessionWrappingOutOfTransaction
+ cfg.setProperty( "hibernate.current_session_context_class", "thread" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/session/SessionTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Animal.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Animal.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Animal.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "Animal")
+public class Animal {
+ @Id
+ @DocumentId
+ private Integer id;
+ @Field(index = Index.TOKENIZED)
+ private String name;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Animal.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,89 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import java.io.Serializable;
+import java.util.Properties;
+
+import org.apache.lucene.document.Document;
+
+import org.hibernate.search.FullTextFilter;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * Shards an index containing data for multiple customers by customerID. customerID is
+ * provided as a property on all indexed entities, and is also defined as a Filter.
+ *
+ * The number of shards should be configured to be MAX(customerID).
+ *
+ * @author Chase Seibert
+ */
+public class CustomerShardingStrategy implements IndexShardingStrategy {
+
+ // stores the DirectoryProviders in an array indexed by customerID
+ private DirectoryProvider<?>[] providers;
+
+ public void initialize(Properties properties, DirectoryProvider<?>[] providers) {
+ this.providers = providers;
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForAllShards() {
+ return providers;
+ }
+
+ public DirectoryProvider<?> getDirectoryProviderForAddition(Class<?> entity, Serializable id, String idInString, Document document) {
+ Integer customerID = Integer.parseInt(document.getField("customerID").stringValue());
+ return providers[customerID];
+ }
+
+ public DirectoryProvider<?>[] getDirectoryProvidersForDeletion(Class<?> entity, Serializable id, String idInString) {
+ return getDirectoryProvidersForAllShards();
+ }
+
+ /**
+ * Optimization: don't search ALL shards and union the results. In this case we
+ * can be certain that all the data for a particular customer Filter is in a single
+ * shard, so we simply return that shard by customerID.
+ */
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] filters) {
+ FullTextFilter filter = getCustomerFilter(filters, "customer");
+ if (filter == null) {
+ return getDirectoryProvidersForAllShards();
+ }
+ else {
+ return new DirectoryProvider[] { providers[Integer.parseInt(filter.getParameter("customerID").toString())] };
+ }
+ }
+
+ private FullTextFilter getCustomerFilter(FullTextFilterImplementor[] filters, String name) {
+ for (FullTextFilterImplementor filter: filters) {
+ if (filter.getName().equals(name)) return filter;
+ }
+ return null;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
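
A sharding strategy like this one is enabled per index through configuration properties. A minimal sketch of that wiring, assuming an index named "Email" split across ten shards (the property-key pattern matches the one used by DirectoryProviderForQueryTest further down; the index name and shard count here are illustrative assumptions, not values taken from this commit):

    // Illustrative wiring for CustomerShardingStrategy; property keys follow the
    // hibernate.search.<indexName>.sharding_strategy pattern seen elsewhere in this commit.
    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    cfg.setProperty( "hibernate.search.Email.sharding_strategy",
            CustomerShardingStrategy.class.getCanonicalName() );
    cfg.setProperty( "hibernate.search.Email.sharding_strategy.nbr_of_shards", "10" );
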
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategyTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategyTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategyTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,80 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import junit.framework.TestCase;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+
+import org.hibernate.search.query.FullTextFilterImpl;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.RAMDirectoryProvider;
+
+/**
+ * @author Chase Seibert
+ */
+public class CustomerShardingStrategyTest extends TestCase {
+
+ private CustomerShardingStrategy shardStrategy;
+
+ protected void setUp() throws Exception {
+ shardStrategy = new CustomerShardingStrategy();
+
+ // initialize with 10 shards
+ shardStrategy.initialize( null, new DirectoryProvider[] {
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider(),
+ new RAMDirectoryProvider()
+ } );
+ }
+
+ public void testGetDirectoryProvidersForQuery() {
+
+ FullTextFilterImpl filter = new FullTextFilterImpl();
+ filter.setName("customer");
+ filter.setParameter("customerID", 5);
+
+ // customerID == 5 should correspond to just a single shard instance
+ DirectoryProvider[] providers = shardStrategy.getDirectoryProvidersForQuery(new FullTextFilterImpl[] { filter });
+ assertTrue(providers.length == 1);
+
+ // create a dummy document for the same customerID, and make sure the shard it would be
+ // indexed on matches the shard returned by getDirectoryProvidersForQuery()
+ Document document = new Document();
+ document.add(new Field("customerID", "5", Field.Store.NO, Field.Index.NOT_ANALYZED));
+
+ assertTrue(providers[0].equals(
+ shardStrategy.getDirectoryProviderForAddition(null, null, null, document)
+ ));
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/CustomerShardingStrategyTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/DirectoryProviderForQueryTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/DirectoryProviderForQueryTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/DirectoryProviderForQueryTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,96 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import java.util.List;
+
+import org.apache.lucene.queryParser.QueryParser;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * @author Chase Seibert
+ */
+public class DirectoryProviderForQueryTest extends SearchTestCase {
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ // this strategy allows the caller to use a pre-search filter to define which index to hit
+ cfg.setProperty( "hibernate.search.Email.sharding_strategy", SpecificShardingStrategy.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Email.sharding_strategy.nbr_of_shards", "2" );
+ }
+
+ /**
+ * Test that you can filter by shard
+ */
+ public void testDirectoryProviderForQuery() throws Exception {
+
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+
+ Email a = new Email();
+ a.setId( 1 );
+ a.setBody( "corporate message" );
+ s.persist( a );
+
+ a = new Email();
+ a.setId( 2 );
+ a.setBody( "spam message" );
+ s.persist( a );
+
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ FullTextSession fts = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+
+ FullTextQuery fullTextQuery = fts.createFullTextQuery( parser.parse( "body:message" ) );
+ List results = fullTextQuery.list();
+ assertEquals( "Query with no filter should bring back results from both shards.", 2, results.size() );
+
+ // index is not a field on the entity; the only way to filter on this is by shard
+ fullTextQuery.enableFullTextFilter("shard").setParameter("index", 0);
+ assertEquals( "Query with filter should bring back results from only one shard.", 1, fullTextQuery.list().size() );
+
+ for (Object o : results) s.delete( o );
+ tx.commit();
+ s.close();
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Email.class
+ };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/DirectoryProviderForQueryTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Email.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Email.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Email.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,64 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.FullTextFilterDef;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.filter.ShardSensitiveOnlyFilter;
+
+@Entity
+@Indexed(index="Email")
+@FullTextFilterDef(name="shard", impl= ShardSensitiveOnlyFilter.class)
+public class Email {
+
+ @Id
+ @DocumentId
+ private Integer id;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ @Field
+ private String body;
+
+ public String getBody() {
+ return body;
+ }
+
+ public void setBody(String body) {
+ this.body = body;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Email.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Furniture.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Furniture.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Furniture.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,61 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Furniture {
+ @Id @GeneratedValue @DocumentId
+ private Integer id;
+ @Field
+ private String color;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getColor() {
+ return color;
+ }
+
+ public void setColor(String color) {
+ this.color = color;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/Furniture.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/IdShardingStrategyTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/IdShardingStrategyTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/IdShardingStrategyTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IdHashShardingStrategy;
+import org.hibernate.search.store.RAMDirectoryProvider;
+
+import junit.framework.TestCase;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class IdShardingStrategyTest extends TestCase {
+
+ private IdHashShardingStrategy shardStrategy;
+
+ protected void setUp() throws Exception {
+ shardStrategy = new IdHashShardingStrategy();
+ shardStrategy.initialize( null, new DirectoryProvider[] {
+ new RAMDirectoryProvider(), new RAMDirectoryProvider() } );
+ }
+
+ public void testHashOverflow() {
+ String key = String.valueOf( Integer.MAX_VALUE - 1 );
+ // any key will do as long as its hash is negative
+ assertTrue( key.hashCode() < 0 );
+ assertAcceptableId( key );
+ }
+
+ private void assertAcceptableId(String id) {
+ try {
+ shardStrategy.getDirectoryProviderForAddition( null, id, id, null );
+ shardStrategy.getDirectoryProvidersForDeletion( null, id, id );
+ }
+ catch ( Exception e ) {
+ fail( "Couldn't get directory for id " + id );
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/IdShardingStrategyTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
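The overflow case exercised by testHashOverflow matters because String.hashCode() can be negative, and a naive "hashCode % nbr_of_shards" would then produce a negative shard index. The usual defensive idiom, shown below purely as an illustration of the concern the test guards against (not as a claim about IdHashShardingStrategy's actual implementation), masks off the sign bit before taking the modulo:

    // illustrative sketch only: map a possibly negative hashCode onto a valid shard index
    static int shardIndexFor(String idInString, int numberOfShards) {
        int hash = idInString.hashCode();
        return ( hash & Integer.MAX_VALUE ) % numberOfShards;
    }
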
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/ShardsTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/ShardsTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/ShardsTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,223 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import java.io.File;
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.store.IdHashShardingStrategy;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ShardsTest extends SearchTestCase {
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getName() );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ //is the default when multiple shards are set up
+ //cfg.setProperty( "hibernate.search.Animal.sharding_strategy", IdHashShardingStrategy.class );
+ cfg.setProperty( "hibernate.search.Animal.sharding_strategy.nbr_of_shards", "2" );
+ cfg.setProperty( "hibernate.search.Animal.0.indexName", "Animal00" );
+ }
+
+ public void testIdShardingStrategy() {
+ DirectoryProvider[] dps = new DirectoryProvider[] { new RAMDirectoryProvider(), new RAMDirectoryProvider() };
+ IdHashShardingStrategy shardingStrategy = new IdHashShardingStrategy();
+ shardingStrategy.initialize( null, dps);
+ assertTrue( dps[1] == shardingStrategy.getDirectoryProviderForAddition( Animal.class, 1, "1", null) );
+ assertTrue( dps[0] == shardingStrategy.getDirectoryProviderForAddition( Animal.class, 2, "2", null) );
+ }
+
+ public void testBehavior() throws Exception {
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+ Animal a = new Animal();
+ a.setId( 1 );
+ a.setName( "Elephant" );
+ s.persist( a );
+ a = new Animal();
+ a.setId( 2 );
+ a.setName( "Bear" );
+ s.persist( a );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ a = (Animal) s.get(Animal.class, 1);
+ a.setName( "Mouse" );
+ Furniture fur = new Furniture();
+ fur.setColor( "dark blue");
+ s.persist( fur );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ FullTextSession fts = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+
+ List results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear" ) ).list();
+ assertEquals( "Either double insert, single update, or query fails with shards", 2, results.size() );
+
+ results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear OR color:blue" ) ).list();
+ assertEquals( "Mixing shared and non sharded properties fails", 3, results.size() );
+ results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear OR color:blue" ) ).list();
+ assertEquals( "Mixing shared and non sharded properties fails with indexreader reuse", 3, results.size() );
+ for (Object o : results) s.delete( o );
+ tx.commit();
+ s.close();
+ }
+
+ public void testInternalSharding() throws Exception {
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+ Animal a = new Animal();
+ a.setId( 1 );
+ a.setName( "Elephant" );
+ s.persist( a );
+ a = new Animal();
+ a.setId( 2 );
+ a.setName( "Bear" );
+ s.persist( a );
+ tx.commit();
+
+ s.clear();
+
+ FSDirectory animal00Directory = FSDirectory.open( new File( getBaseIndexDir(), "Animal00" ) );
+ try {
+ IndexReader reader = IndexReader.open( animal00Directory );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ }
+ finally {
+ reader.close();
+ }
+ }
+ finally {
+ animal00Directory.close();
+ }
+
+ FSDirectory animal01Directory = FSDirectory.open( new File( getBaseIndexDir(), "Animal.1" ) );
+ try {
+ IndexReader reader = IndexReader.open( animal01Directory );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ }
+ finally {
+ reader.close();
+ }
+ }
+ finally {
+ animal01Directory.close();
+ }
+
+ tx = s.beginTransaction();
+ a = (Animal) s.get(Animal.class, 1);
+ a.setName( "Mouse" );
+ tx.commit();
+
+ s.clear();
+
+ animal01Directory = FSDirectory.open( new File( getBaseIndexDir(), "Animal.1" ) );
+ try {
+ IndexReader reader = IndexReader.open( animal01Directory );
+ try {
+ int num = reader.numDocs();
+ assertEquals( 1, num );
+ TermDocs docs = reader.termDocs( new Term( "name", "mouse" ) );
+ assertTrue( docs.next() );
+ org.apache.lucene.document.Document doc = reader.document( docs.doc() );
+ assertFalse( docs.next() );
+ }
+ finally {
+ reader.close();
+ }
+ }
+ finally {
+ animal01Directory.close();
+ }
+
+ tx = s.beginTransaction();
+ FullTextSession fts = Search.getFullTextSession( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+
+ List results = fts.createFullTextQuery( parser.parse( "name:mouse OR name:bear" ) ).list();
+ assertEquals( "Either double insert, single update, or query fails with shards", 2, results.size() );
+ for (Object o : results) s.delete( o );
+ tx.commit();
+ s.close();
+ }
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for (File file : files) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Animal.class,
+ Furniture.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/ShardsTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
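The directory names asserted in testInternalSharding follow from the sharding configuration in configure(): with nbr_of_shards set to 2 the Animal index is split into shards named "Animal.0" and "Animal.1" by default, and the explicit indexName property renames shard 0 to "Animal00". A minimal sketch of the relevant settings (assuming a Hibernate Configuration instance named cfg, as in the test):

    // two shards for the Animal index; shard 0 gets a custom index name
    cfg.setProperty( "hibernate.search.Animal.sharding_strategy.nbr_of_shards", "2" );
    cfg.setProperty( "hibernate.search.Animal.0.indexName", "Animal00" );
    // testInternalSharding therefore opens <indexBase>/Animal00 and <indexBase>/Animal.1
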
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/SpecificShardingStrategy.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/SpecificShardingStrategy.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/SpecificShardingStrategy.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,53 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.shards;
+
+import org.hibernate.search.FullTextFilter;
+import org.hibernate.search.filter.FullTextFilterImplementor;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IdHashShardingStrategy;
+
+public class SpecificShardingStrategy extends IdHashShardingStrategy {
+
+ @Override
+ public DirectoryProvider<?>[] getDirectoryProvidersForQuery(FullTextFilterImplementor[] filters) {
+
+ FullTextFilter filter = getFilter(filters, "shard");
+ if (filter == null) {
+ return getDirectoryProvidersForAllShards();
+ }
+ else {
+ return new DirectoryProvider[] { getDirectoryProvidersForAllShards()[Integer.parseInt(filter.getParameter("index").toString())] };
+ }
+ }
+
+ private FullTextFilter getFilter(FullTextFilterImplementor[] filters, String name) {
+ for (FullTextFilterImplementor filter: filters) {
+ if (filter.getName().equals(name)) return filter;
+ }
+ return null;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/shards/SpecificShardingStrategy.java
___________________________________________________________________
Name: svn:keywords
+ Id
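At query time this strategy cooperates with the ShardSensitiveOnlyFilter declared on Email (@FullTextFilterDef(name="shard", ...)): enabling that filter is not meant to filter documents, it only carries parameters to getDirectoryProvidersForQuery() so the strategy can narrow the search to one shard. Condensed from DirectoryProviderForQueryTest above (fts and parser are assumed to be the FullTextSession and QueryParser the test sets up):

    FullTextQuery query = fts.createFullTextQuery( parser.parse( "body:message" ) );
    // "shard" is the filter name declared on Email; its "index" parameter selects the shard
    query.enableFullTextFilter( "shard" ).setParameter( "index", 0 );
    List results = query.list();   // results come from shard 0 only
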
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Can.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Can.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Can.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,63 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed
+public class Can {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+ @Field
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Can.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,65 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import org.apache.lucene.search.DefaultSimilarity;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@SuppressWarnings("serial")
+public class DummySimilarity extends DefaultSimilarity {
+ private float CONST = 1.0f;
+
+ @Override
+ public float lengthNorm(String fieldName, int numTerms) {
+ return CONST;
+ }
+
+ @Override
+ public float queryNorm(float sumOfSquaredWeights) {
+ return CONST;
+ }
+
+ @Override
+ public float tf(float freq) {
+ return CONST;
+ }
+
+ @Override
+ public float sloppyFreq(int distance) {
+ return CONST;
+ }
+
+ @Override
+ public float idf(int docFreq, int numDocs) {
+ return CONST;
+ }
+
+ @Override
+ public float coord(int overlap, int maxOverlap) {
+ return CONST;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity2.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity2.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity2.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,64 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import org.apache.lucene.search.DefaultSimilarity;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class DummySimilarity2 extends DefaultSimilarity {
+ private float CONST = .5f;
+
+ @Override
+ public float lengthNorm(String fieldName, int numTerms) {
+ return CONST;
+ }
+
+ @Override
+ public float queryNorm(float sumOfSquaredWeights) {
+ return CONST;
+ }
+
+ @Override
+ public float tf(float freq) {
+ return CONST;
+ }
+
+ @Override
+ public float sloppyFreq(int distance) {
+ return CONST;
+ }
+
+ @Override
+ public float idf(int docFreq, int numDocs) {
+ return CONST;
+ }
+
+ @Override
+ public float coord(int overlap, int maxOverlap) {
+ return CONST;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/DummySimilarity2.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/IllegalSimilarityConfigurationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/IllegalSimilarityConfigurationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/IllegalSimilarityConfigurationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,122 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import junit.framework.TestCase;
+
+import org.hibernate.search.test.util.FullTextSessionBuilder;
+
+public class IllegalSimilarityConfigurationTest extends TestCase {
+
+ public void testValidConfiguration() {
+ boolean configurationIsLegal = true;
+ FullTextSessionBuilder builder = null;
+ try {
+ builder = new FullTextSessionBuilder()
+ .addAnnotatedClass(Can.class)
+ .addAnnotatedClass(Trash.class).build();
+ } catch (Exception e) {
+ configurationIsLegal = false;
+ }
+ finally {
+ if (builder!=null)
+ builder.close();
+ }
+ assertTrue( "A valid configuration could not be started.", configurationIsLegal );
+ }
+
+ public void testInconsistentSimilarityInClassHierarchy() {
+ boolean configurationIsLegal = true;
+ FullTextSessionBuilder builder = null;
+ try {
+ builder = new FullTextSessionBuilder()
+ .addAnnotatedClass( Trash.class )
+ .addAnnotatedClass( LittleTrash.class ).build();
+ } catch (Exception e) {
+ configurationIsLegal = false;
+ }
+ finally {
+ if (builder!=null)
+ builder.close();
+ }
+ assertFalse( "Invalid Similarity declared, should have thrown an exception: same similarity"
+ + " must be used across class hierarchy", configurationIsLegal );
+ }
+
+ public void testInconsistentSimilarityInClassSharingAnIndex() {
+ boolean configurationIsLegal = true;
+ FullTextSessionBuilder builder = null;
+ try {
+ builder = new FullTextSessionBuilder()
+ .addAnnotatedClass( Trash.class )
+ .addAnnotatedClass( Sink.class ).build();
+ } catch (Exception e) {
+ configurationIsLegal = false;
+ }
+ finally {
+ if (builder!=null)
+ builder.close();
+ }
+ assertFalse( "Invalid Similarity declared, should have thrown an exception: two entities"
+ + "sharing the same index are using a different similarity", configurationIsLegal );
+}
+
+ public void testImplicitSimilarityInheritanceIsValid() {
+ boolean configurationIsLegal = true;
+ FullTextSessionBuilder builder = null;
+ try {
+ builder = new FullTextSessionBuilder()
+ .addAnnotatedClass( Trash.class )
+ .addAnnotatedClass( ProperTrashExtension.class ).build();
+ } catch (Exception e) {
+ configurationIsLegal = false;
+ }
+ finally {
+ if (builder!=null)
+ builder.close();
+ }
+ assertTrue( "Valid configuration could not be built", configurationIsLegal );
+ }
+
+ public void testInvalidToOverrideParentsSimilarity() {
+ boolean configurationIsLegal = true;
+ FullTextSessionBuilder builder = null;
+ try {
+ builder = new FullTextSessionBuilder()
+ .addAnnotatedClass( Can.class )
+ .addAnnotatedClass( SmallerCan.class ).build();
+ } catch (Exception e) {
+ configurationIsLegal = false;
+ }
+ finally {
+ if (builder!=null)
+ builder.close();
+ }
+ assertFalse( "Invalid Similarity declared, should have thrown an exception: child entity"
+ + " is overriding parent's Similarity", configurationIsLegal );
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/IllegalSimilarityConfigurationTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/LittleTrash.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/LittleTrash.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/LittleTrash.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,41 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Similarity;
+
+/**
+ * @author Sanne Grinovero
+ */
+@Entity
+@Similarity(impl = DummySimilarity2.class)
+public class LittleTrash extends Trash {
+
+ //extends Trash, but declares a different similarity
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/LittleTrash.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/ProperTrashExtension.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/ProperTrashExtension.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/ProperTrashExtension.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,36 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import org.hibernate.search.annotations.Indexed;
+
+@Indexed
+//Is inheriting: @Similarity(impl = DummySimilarity.class)
+public class ProperTrashExtension extends Trash {
+
+ //only needing a different type for testing purposes
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/ProperTrashExtension.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SimilarityTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SimilarityTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SimilarityTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,95 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import java.util.List;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.Search;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.Environment;
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.cfg.Configuration;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.index.Term;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SimilarityTest extends SearchTestCase {
+ public void testClassAndGlobalSimilarity() throws Exception {
+ Session s = openSession( );
+ Transaction tx = s.beginTransaction();
+ Trash trash = new Trash();
+ trash.setName( "Green trash" );
+ s.persist( trash );
+ trash = new Trash();
+ trash.setName( "Green Green Green trash" );
+ s.persist( trash );
+ Can can = new Can();
+ can.setName( "Green can" );
+ s.persist( can );
+ can = new Can();
+ can.setName( "Green Green Green can" );
+ s.persist( can );
+ tx.commit();
+
+ s.clear();
+
+ tx = s.beginTransaction();
+ TermQuery tq = new TermQuery( new Term("name", "green") );
+ FullTextSession fts = Search.getFullTextSession( s );
+ List results = fts.createFullTextQuery( tq, Trash.class ).setProjection( FullTextQuery.SCORE, FullTextQuery.THIS ).list();
+ assertEquals( 2, results.size() );
+ assertEquals( "Similarity not overridden at the class level", ( (Object[]) results.get( 0 ) )[0], ( (Object[]) results.get( 1 ) )[0]);
+ assertEquals( "Similarity not overridden", 1.0f, ( (Object[]) results.get( 0 ) )[0] );
+ for (Object result : results) s.delete( ( (Object[]) result )[1] );
+
+ results = fts.createFullTextQuery( tq, Can.class ).setProjection( FullTextQuery.SCORE, FullTextQuery.THIS ).list();
+ assertEquals( 2, results.size() );
+ assertEquals( "Similarity not overridden by the global setting", ( (Object[]) results.get( 0 ) )[0], ( (Object[]) results.get( 1 ) )[0]);
+ assertFalse( "Similarity not overridden by the global setting", new Float(1.0f).equals( ( (Object[]) results.get( 0 ) )[0] ) );
+ for (Object result : results) s.delete( ( (Object[]) result )[1] );
+
+ tx.commit();
+ s.close();
+
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Trash.class,
+ Can.class
+ };
+ }
+
+ @Override
+ protected void configure(Configuration cfg) {
+ cfg.setProperty( Environment.SIMILARITY_CLASS, DummySimilarity2.class.getName() );
+ super.configure( cfg );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SimilarityTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Sink.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Sink.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Sink.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,67 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Similarity;
+
+/**
+ * @author Sanne Grinovero
+ */
+@Entity
+@Indexed(index="garbageIndex")
+@Similarity(impl = DummySimilarity2.class)
+public class Sink {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Sink.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SmallerCan.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SmallerCan.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SmallerCan.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,40 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Similarity;
+
+@Entity
+@Indexed
+@Similarity(impl = DummySimilarity2.class)
+public class SmallerCan extends Can {
+
+ //illegal type: can't override a parent-defined Similarity
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/SmallerCan.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ LF
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Trash.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Trash.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Trash.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.similarity;
+
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Entity;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Similarity;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="garbageIndex")
+@Similarity(impl = DummySimilarity.class)
+public class Trash {
+ @Id
+ @DocumentId
+ @GeneratedValue
+ private Integer id;
+
+ @Field
+ private String name;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/similarity/Trash.java
___________________________________________________________________
Name: svn:keywords
+ Id
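Taken together with SimilarityTest, the entities above document the precedence rule the test asserts: a class-level @Similarity (DummySimilarity on Trash) overrides the global similarity, while an entity without the annotation (Can) falls back to the global Environment.SIMILARITY_CLASS setting. A minimal sketch, assuming an AnnotationConfiguration instance named cfg:

    // global default; Trash still uses DummySimilarity because of its class-level annotation
    cfg.setProperty( org.hibernate.search.Environment.SIMILARITY_CLASS,
            DummySimilarity2.class.getName() );
    // SimilarityTest asserts Trash scores come back as exactly 1.0f (DummySimilarity),
    // while Can scores differ from 1.0f (global DummySimilarity2 applies).
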
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/AnalyzerUtils.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/AnalyzerUtils.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/AnalyzerUtils.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,131 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
+import org.slf4j.Logger;
+
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * Helper class to test analyzers. Taken and modified from <i>Lucene in Action</i>.
+ *
+ * @author Hardy Ferentschik
+ */
+public class AnalyzerUtils {
+
+ public static final Logger log = LoggerFactory.make();
+
+ public static Token[] tokensFromAnalysis(Analyzer analyzer, String field, String text) throws IOException {
+ TokenStream stream = analyzer.tokenStream( field, new StringReader( text ) );
+ List<Token> tokenList = new ArrayList<Token>();
+ Token reusableToken = new Token();
+ while ( true ) {
+
+ Token token = stream.next( reusableToken );
+ if ( token == null ) {
+ break;
+ }
+
+ tokenList.add( ( Token ) token.clone() );
+ }
+
+ return tokenList.toArray( new Token[tokenList.size()] );
+ }
+
+ public static void displayTokens(Analyzer analyzer, String field, String text) throws IOException {
+ Token[] tokens = tokensFromAnalysis( analyzer, field, text );
+
+ for ( Token token : tokens ) {
+ log.debug( "[" + getTermText( token ) + "] " );
+ }
+ }
+
+ public static void displayTokensWithPositions(Analyzer analyzer, String field, String text) throws IOException {
+ Token[] tokens = tokensFromAnalysis( analyzer, field, text );
+
+ int position = 0;
+
+ for ( Token token : tokens ) {
+ int increment = token.getPositionIncrement();
+
+ if ( increment > 0 ) {
+ position = position + increment;
+ System.out.println();
+ System.out.print( position + ": " );
+ }
+
+ log.debug( "[" + getTermText( token ) + "] " );
+ }
+ }
+
+ public static void displayTokensWithFullDetails(Analyzer analyzer, String field, String text) throws IOException {
+ Token[] tokens = tokensFromAnalysis( analyzer, field, text );
+ StringBuilder builder = new StringBuilder();
+ int position = 0;
+
+ for ( Token token : tokens ) {
+ int increment = token.getPositionIncrement();
+
+ if ( increment > 0 ) {
+ position = position + increment;
+ builder.append( "\n" ).append( position ).append( ": " );
+ }
+
+ builder.append( "[" )
+ .append( getTermText( token ) )
+ .append( ":" )
+ .append( token.startOffset() )
+ .append( "->" )
+ .append(
+ token.endOffset()
+ )
+ .append( ":" )
+ .append( token.type() )
+ .append( "] " );
+ log.debug( builder.toString() );
+ }
+ }
+
+ public static void assertTokensEqual(Token[] tokens, String[] strings) {
+ Assert.assertEquals( strings.length, tokens.length );
+
+ for ( int i = 0; i < tokens.length; i++ ) {
+ Assert.assertEquals( "index " + i, strings[i], getTermText( tokens[i] ) );
+ }
+ }
+
+ public static String getTermText(Token token) {
+ return new String( token.termBuffer(), 0, token.termLength() );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/AnalyzerUtils.java
___________________________________________________________________
Name: svn:keywords
+ Id
Name: svn:eol-style
+ native
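A typical use of the helper inside a test, as a sketch only: the input string and expected tokens assume the lower-casing, stop-word-removing behaviour of SearchTestCase.stopAnalyzer used elsewhere in this patch, and tokensFromAnalysis declares IOException, so the calling test method must as well:

    Token[] tokens = AnalyzerUtils.tokensFromAnalysis(
            SearchTestCase.stopAnalyzer, "name", "The Green Trash" );
    // StopAnalyzer lower-cases and drops English stop words such as "the"
    AnalyzerUtils.assertTokensEqual( tokens, new String[] { "green", "trash" } );
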
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FileHelperTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FileHelperTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FileHelperTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,139 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import junit.framework.TestCase;
+import org.slf4j.Logger;
+
+import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
+
+/**
+ * @author Emmanuel Bernard
+ * @author Hardy Ferentschik
+ */
+public class FileHelperTest extends TestCase {
+ private static final Logger log = LoggerFactory.make();
+
+ private static File root;
+
+ static {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
+ buildDir = ".";
+ }
+ root = new File( buildDir, "filehelper" );
+ log.info( "Using {} as test directory.", root.getAbsolutePath() );
+ }
+
+ /**
+ * Source directory
+ */
+ private String srcDir = "filehelpersrc";
+
+ /**
+ * Destination directory
+ */
+ private String destDir = "filehelperdest";
+
+
+ private File createFile(File dir, String name) throws IOException {
+ File file = new File( dir, name );
+ file.createNewFile();
+ writeDummyDataToFile( file );
+ return file;
+ }
+
+ private void writeDummyDataToFile(File file) throws IOException {
+ FileOutputStream os = new FileOutputStream( file, true );
+ os.write( 1 );
+ os.write( 2 );
+ os.write( 3 );
+ os.flush();
+ os.close();
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File dir = new File( root, srcDir );
+ FileHelper.delete( dir );
+ dir = new File( root, destDir );
+ FileHelper.delete( dir );
+ FileHelper.delete( root );
+ }
+
+ public void testSynchronize() throws Exception {
+ // create a src directory structure
+ File src = new File( root, srcDir );
+ src.mkdirs();
+ String name = "a";
+ createFile( src, name );
+ name = "b";
+ createFile( src, name );
+ File subDir = new File( src, "subdir" );
+ subDir.mkdirs();
+ name = "c";
+ createFile( subDir, name );
+
+ // create destination and sync
+ File dest = new File( root, destDir );
+ assertFalse( "Directories should be out of sync", FileHelper.areInSync( src, dest ) );
+ FileHelper.synchronize( src, dest, true );
+ assertTrue( "Directories should be in sync", FileHelper.areInSync( src, dest ) );
+ File destTestFile1 = new File( dest, "b" );
+ assertTrue( destTestFile1.exists() );
+ File destTestFile2 = new File( new File( dest, "subdir" ), "c" );
+ assertTrue( destTestFile2.exists() );
+
+ // create a new file in the destination which does not exist in src; it should be deleted after the next sync
+ File destTestFile3 = createFile( dest, "foo" );
+
+ // create a file in the src directory and write some data to it
+ File srcTestFile = new File( src, "c" );
+ writeDummyDataToFile( srcTestFile );
+ File destTestFile = new File( dest, "c" );
+ assertNotSame( srcTestFile.lastModified(), destTestFile.lastModified() );
+ assertFalse( "Directories should be out of sync", FileHelper.areInSync( src, dest ) );
+
+ FileHelper.synchronize( src, dest, true );
+
+ assertTrue("Directories should be in sync", FileHelper.areInSync( src, dest ));
+ assertEquals( srcTestFile.lastModified(), destTestFile.lastModified() );
+ assertEquals( srcTestFile.length(), destTestFile.length() );
+ assertTrue( destTestFile1.exists() );
+ assertTrue( destTestFile2.exists() );
+ assertTrue( !destTestFile3.exists() );
+
+ // delete src test file
+ srcTestFile.delete();
+ FileHelper.synchronize( src, dest, true );
+ assertTrue( !destTestFile.exists() );
+ assertTrue("Directories should be in sync", FileHelper.areInSync( src, dest ));
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FileHelperTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
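
For illustration only, not part of this commit: a minimal sketch of the FileHelper calls exercised by the test above, with hypothetical directory names.

import java.io.File;
import java.io.IOException;

import org.hibernate.search.util.FileHelper;

public class FileHelperUsageSketch {
	public static void main(String[] args) throws IOException {
		File source = new File( "build/master-copy" ); // hypothetical source directory
		File target = new File( "build/slave-copy" );  // hypothetical target directory
		if ( !FileHelper.areInSync( source, target ) ) {
			// same flag value the test uses: copies changed files and
			// removes target files which no longer exist in source
			FileHelper.synchronize( source, target, true );
		}
		// recursive delete, as used in the test's tearDown()
		FileHelper.delete( target );
	}
}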
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FullTextSessionBuilder.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FullTextSessionBuilder.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FullTextSessionBuilder.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,158 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util;
+
+import java.io.File;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.AnnotationConfiguration;
+import org.hibernate.cfg.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.util.FileHelper;
+import org.slf4j.Logger;
+
+/**
+ * Use the builder pattern to provide a SessionFactory.
+ * This is meant to use only RAM-based indexes and databases, for those tests
+ * which need to use several differently configured SessionFactories.
+ *
+ * @author Sanne Grinovero
+ * @author Hardy Ferentschik
+ */
+public class FullTextSessionBuilder {
+
+ private static final Logger log = org.hibernate.search.util.LoggerFactory.make();
+
+ private static final File indexDir;
+
+ private AnnotationConfiguration cfg;
+ private SessionFactory sessionFactory;
+ private boolean usingFileSystem = false;
+
+ static {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
+ buildDir = ".";
+ }
+ File current = new File( buildDir );
+ indexDir = new File( current, "indextemp" );
+ log.debug( "Using {} as index directory.", indexDir.getAbsolutePath() );
+ }
+
+ public FullTextSessionBuilder() {
+ cfg = new AnnotationConfiguration();
+ cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
+
+ //cache:
+ cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" );
+ cfg.setProperty( Environment.CACHE_PROVIDER,
+ org.hibernate.cache.HashtableCacheProvider.class.getCanonicalName() );
+ cfg.setProperty( Environment.USE_QUERY_CACHE, "true" );
+
+ //search specific:
+ cfg.setProperty( org.hibernate.search.Environment.ANALYZER_CLASS,
+ StopAnalyzer.class.getName() );
+ useRAMDirectoryProvider( true );
+ }
+
+ /**
+ * @param use if true, use indexes in RAM; otherwise use FSDirectoryProvider
+ * @return the same builder (this).
+ */
+ public FullTextSessionBuilder useRAMDirectoryProvider(boolean use) {
+ if ( use ) {
+ cfg.setProperty( "hibernate.search.default.directory_provider",
+ RAMDirectoryProvider.class.getName() );
+ usingFileSystem = false;
+ }
+ else {
+ cfg.setProperty( "hibernate.search.default.directory_provider",
+ FSDirectoryProvider.class.getName() );
+ usingFileSystem = true;
+ }
+ return this;
+ }
+
+ /**
+ * Overrides a configuration property or adds a new one; call this before building.
+ * @param key Property name.
+ * @param value Property value.
+ * @return the same builder (this).
+ */
+ public FullTextSessionBuilder setProperty(String key, String value) {
+ cfg.setProperty( key, value );
+ return this;
+ }
+
+ /**
+ * Adds classes to the SessionFactory being built.
+ * @param annotatedClass The annotated class to add to the configuration.
+ * @return the same builder (this)
+ */
+ public FullTextSessionBuilder addAnnotatedClass(Class annotatedClass) {
+ cfg.addAnnotatedClass( annotatedClass );
+ return this;
+ }
+
+ /**
+ * @return a new FullTextSession based upon the built configuration.
+ */
+ public FullTextSession openFullTextSession() {
+ if ( sessionFactory == null ) {
+ build();
+ }
+ Session session = sessionFactory.openSession();
+ return Search.getFullTextSession( session );
+ }
+
+ /**
+ * Closes the SessionFactory.
+ * Make sure you close all sessions first.
+ */
+ public void close() {
+ if ( sessionFactory == null ) {
+ throw new java.lang.IllegalStateException( "sessionFactory not yet built" );
+ }
+ sessionFactory.close();
+ if ( usingFileSystem ) {
+ FileHelper.delete( indexDir );
+ }
+ sessionFactory = null;
+ }
+
+ /**
+ * Builds the sessionFactory as configured so far.
+ */
+ public FullTextSessionBuilder build() {
+ sessionFactory = cfg.buildSessionFactory();
+ return this;
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/FullTextSessionBuilder.java
___________________________________________________________________
Name: svn:keywords
+ Id
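
For illustration only, not part of this commit: a sketch of how another test might use the new builder, registering one of the indexed test entities added elsewhere in this revision.

import org.hibernate.search.FullTextSession;
import org.hibernate.search.test.util.FullTextSessionBuilder;
import org.hibernate.search.test.worker.Employee;

public class FullTextSessionBuilderUsageSketch {
	public void example() {
		// RAM-based index directories are the default
		FullTextSessionBuilder builder = new FullTextSessionBuilder()
				.addAnnotatedClass( Employee.class )
				.useRAMDirectoryProvider( true )
				.build();
		FullTextSession session = builder.openFullTextSession();
		try {
			// ... persist entities and run full-text queries ...
		}
		finally {
			session.close();
			builder.close(); // closes the SessionFactory and wipes FS indexes when used
		}
	}
}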
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/PluginLoaderTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/PluginLoaderTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/PluginLoaderTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,114 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util;
+
+import org.apache.lucene.search.DefaultSimilarity;
+import org.apache.lucene.search.Similarity;
+import org.hibernate.Session;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.SearchException;
+import org.hibernate.search.backend.BackendQueueProcessorFactory;
+import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
+import org.hibernate.search.backend.impl.batchlucene.LuceneBatchBackend;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.util.PluginLoader;
+
+import junit.framework.TestCase;
+
+/**
+ * Test for PluginLoader, also verifying that it throws easy-to-understand exceptions
+ *
+ * @author Sanne Grinovero
+ */
+public class PluginLoaderTest extends TestCase {
+
+ public void testInstanceFromName() {
+ BatchBackend batchBackend = PluginLoader.instanceFromName(BatchBackend.class, LuceneBatchBackend.class.getName(), getClass(), "Lucene batch backend");
+ assertNotNull( batchBackend );
+ assertTrue( batchBackend.getClass().equals( LuceneBatchBackend.class ) );
+
+ try {
+ PluginLoader.instanceFromName( BackendQueueProcessorFactory.class, "HeyThisClassIsNotThere", getClass(), "backend" );
+ fail( "was expecting a SearchException" );
+ }
+ catch (Exception e) {
+ assertEquals( e.getClass(), SearchException.class );
+ assertEquals( "Unable to find backend implementation class: HeyThisClassIsNotThere", e.getMessage() );
+ }
+ }
+
+ public void testInstanceFromClass() {
+ //testing for interface implementation:
+ BatchBackend batchBackend = PluginLoader.instanceFromClass( BatchBackend.class, LuceneBatchBackend.class, "Lucene batch backend" );
+ assertNotNull( batchBackend );
+ assertTrue( batchBackend.getClass().equals( LuceneBatchBackend.class ) );
+
+ //testing for subclasses:
+ Similarity sim = PluginLoader.instanceFromClass( Similarity.class, DefaultSimilarity.class, "default similarity" );
+ assertNotNull( sim );
+ assertTrue( sim.getClass().equals( DefaultSimilarity.class ) );
+
+ //testing proper error messages:
+ wrappingTestFromClass(
+ "Wrong configuration of Lucene batch backend: class " +
+ "org.hibernate.search.test.util.PluginLoaderTest does not implement " +
+ "interface org.hibernate.search.backend.impl.batchlucene.BatchBackend",
+ BatchBackend.class, PluginLoaderTest.class, "Lucene batch backend"
+ );
+ wrappingTestFromClass(
+ "org.hibernate.search.impl.FullTextSessionImpl defined for component session " +
+ "is missing a no-arguments constructor",
+ FullTextSession.class, FullTextSessionImpl.class, "session"
+ );
+ wrappingTestFromClass(
+ "org.hibernate.Session defined for component session is an interface: implementation required.",
+ FullTextSession.class, Session.class, "session"
+ );
+ wrappingTestFromClass(
+ "Wrong configuration of default similarity: " +
+ "class org.hibernate.search.backend.impl.batchlucene.LuceneBatchBackend " +
+ "is not a subtype of org.apache.lucene.search.Similarity",
+ Similarity.class, LuceneBatchBackend.class, "default similarity"
+ );
+ wrappingTestFromClass(
+ "Unable to instantiate default similarity class: org.apache.lucene.search.Similarity. " +
+ "Verify it has a no-args public constructor and is not abstract.",
+ Similarity.class, Similarity.class, "default similarity"
+ );
+ }
+
+ private void wrappingTestFromClass(String expectedErrorMessage, Class<?> interf, Class<?> impl, String componentName) {
+ try {
+ PluginLoader.instanceFromClass( interf, impl, componentName );
+ fail( "was expecting a SearchException" );
+ }
+ catch (Exception e) {
+ assertEquals( e.getClass(), SearchException.class );
+ assertEquals( expectedErrorMessage, e.getMessage() );
+ }
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/PluginLoaderTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
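
For illustration only, not part of this commit: a minimal sketch of the two PluginLoader entry points the test exercises.

import org.hibernate.search.backend.impl.batchlucene.BatchBackend;
import org.hibernate.search.backend.impl.batchlucene.LuceneBatchBackend;
import org.hibernate.search.util.PluginLoader;

public class PluginLoaderUsageSketch {
	public static void main(String[] args) {
		// resolve an implementation from a configuration string; the component
		// description ("Lucene batch backend") shows up in error messages
		BatchBackend byName = PluginLoader.instanceFromName(
				BatchBackend.class,
				LuceneBatchBackend.class.getName(),
				PluginLoaderUsageSketch.class,
				"Lucene batch backend" );

		// or instantiate from a known implementation class
		BatchBackend byClass = PluginLoader.instanceFromClass(
				BatchBackend.class, LuceneBatchBackend.class, "Lucene batch backend" );

		System.out.println( byName.getClass() + " / " + byClass.getClass() );
	}
}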
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/SentenceInventor.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/SentenceInventor.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/SentenceInventor.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,178 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util.textbuilder;
+
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Test utility meant to produce sentences of a randomly generated language,
+ * having some properties of natural languages.
+ * The goal is to produce sentences which look like western text,
+ * but are not.
+ * All sentences from the same SentenceInventor will share
+ * a limited dictionary, making the frequencies suitable to test
+ * with Lucene.
+ * Sentences produced depend on the constructor arguments,
+ * making the output predictable for testing purposes.
+ *
+ * @author Sanne Grinovero
+ */
+public class SentenceInventor {
+
+ private final Random r;
+ private final WordDictionary dictionary;
+ //array contains repeated object for probability distribution (more chance for a ",")
+ private final char[] sentenceSeparators = new char[] { ',', ',', ',' , ';', ':', ':' };
+
+ /**
+ * @param randomSeed the seed to use for random generator
+ * @param dictionarySize the number of terms to insert in the dictionary used to build sentences
+ */
+ public SentenceInventor(long randomSeed, int dictionarySize) {
+ r = new Random( randomSeed );
+ dictionary = randomDictionary( dictionarySize );
+ }
+
+ /**
+ * @return a random uppercase ASCII character (A-Z)
+ */
+ public char randomCharacter() {
+ return (char) (r.nextInt( 26 ) + 65);
+ }
+
+ /**
+ * @param length the desired length
+ * @return a randomly generated String
+ */
+ public String randomString(int length) {
+ char[] chars = new char[length];
+ for ( int i=0; i<length; i++ ) {
+ chars[i] = randomCharacter();
+ }
+ return new String( chars );
+ }
+
+ /**
+ * Produces a randomly generated String, using
+ * only western alphabet characters and selecting
+ * the length as a normal distribution of natural languages.
+ * @return the generated String
+ */
+ public String randomString() {
+ double d = r.nextGaussian() * 6.3d;
+ int l = (int) d + 6;
+ if ( l > 0 )
+ return randomString( l );
+ else
+ return randomString();
+ }
+
+ /**
+ * Produces a random String which may be all lowercase,
+ * all uppercase, or have only the first character uppercase
+ * (randomly selected).
+ * @return produced String
+ */
+ public String randomTerm() {
+ int i = r.nextInt( 200 );
+ String term = randomString();
+ if ( i > 10 )
+ //completely lowercase 189/200 cases
+ return term.toLowerCase();
+ else if ( i < 2 )
+ //completely uppercase in 2/200 cases
+ return term;
+ else
+ //first letter uppercase in 9/200 cases
+ return term.substring( 0, 1 ) + term.substring( 1 ).toLowerCase();
+ }
+
+ private WordDictionary randomDictionary(int size) {
+ Set<String> tree = new TreeSet<String>();
+ while ( tree.size() != size ) {
+ tree.add( randomTerm() );
+ }
+ return new WordDictionary( tree );
+ }
+
+ /**
+ * Builds a sentence by concatenating terms from the generated dictionary, separated by spaces
+ * @return a sentence
+ */
+ public String nextSentence() {
+ int sentenceLength = r.nextInt( 3 ) + r.nextInt( 10 ) + 1;
+ String[] sentence = new String[sentenceLength];
+ for ( int i=0; i<sentenceLength; i++ ) {
+ sentence[i] = dictionary.randomWord();
+ }
+ if ( sentenceLength == 1 ) {
+ return sentence[0];
+ }
+ else {
+ StringBuilder sb = new StringBuilder( sentence[0]);
+ for ( int i=1; i<sentenceLength; i++) {
+ sb.append( " " );
+ sb.append( sentence[i] );
+ }
+ return sb.toString();
+ }
+ }
+
+ /**
+ * Combines a random (Gaussian) number of sentences into a period,
+ * joining them with punctuation symbols,
+ * capitalizing the first character, and terminating with a dot and newline.
+ * @return the generated period
+ */
+ public String nextPeriod() {
+ int periodLengthSentences = r.nextInt( 7 ) - 2;
+ periodLengthSentences = ( periodLengthSentences < 1 ) ? 1 : periodLengthSentences;
+ String firstsentence = nextSentence();
+ StringBuilder sb = new StringBuilder()
+ .append( firstsentence.substring( 0,1 ).toUpperCase() )
+ .append( firstsentence.substring( 1 ) );
+ for ( int i=1; i<periodLengthSentences; i++ ) {
+ int separatorCharIndex = r.nextInt( sentenceSeparators.length );
+ sb
+ .append( sentenceSeparators[separatorCharIndex] )
+ .append( ' ' )
+ .append( nextSentence() );
+ }
+ sb.append( ".\n" );
+ return sb.toString();
+ }
+
+ //run it to get an idea of what this class is going to produce
+ public static void main(String[] args) {
+ SentenceInventor wi = new SentenceInventor( 7L, 10000 );
+ for (int i=0; i<30; i++) {
+ System.out.print( wi.nextPeriod() );
+ }
+ }
+
+}
+
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/SentenceInventor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/TextProductionTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/TextProductionTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/TextProductionTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,51 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util.textbuilder;
+
+import junit.framework.TestCase;
+
+/**
+ * Tests WordDictionary and SentenceInventor;
+ * these are test utilities not part of the Search distribution.
+ * The test exists to spot whether the text they produce changes, so
+ * that other tests can rely on working test utilities.
+ *
+ * @see WordDictionary
+ * @see SentenceInventor
+ *
+ * @author Sanne Grinovero
+ */
+public class TextProductionTest extends TestCase {
+
+ public void testSomeWordsGetBuilt() {
+ SentenceInventor wi = new SentenceInventor( 7L, 200 );
+ String randomPeriod = wi.nextPeriod();
+ // randomPeriod will be some random sentence like "Qoswo, orrmi ag ybwp bbtb kw qgtqaon lyhk nbv: qrqm flyui hyshm jmpqyb qmolml fjxw gnumocv Twwg."
+ // but the exact string contents depend on the environment
+ assertNotNull( randomPeriod );
+ assertTrue( randomPeriod.length() > 0 );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/TextProductionTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/WordDictionary.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/WordDictionary.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/WordDictionary.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,75 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.util.textbuilder;
+
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Random;
+import java.util.Set;
+
+/**
+ * Test utility meant to build a consistent dictionary of words.
+ * This is not just a random generator: as in natural
+ * languages, shorter terms have a higher frequency in the text corpus,
+ * and the dictionary size is limited.
+ *
+ * @author Sanne Grinovero
+ */
+public class WordDictionary {
+
+ private final String[] positionalWords;
+ private final int maxSize;
+ private final double gaussFactor;
+
+ private static final Random r = new Random( 12L );
+
+ public WordDictionary(Set<String> words) {
+ this.positionalWords = words.toArray( new String[0] );
+ //sort by String length. Languages use shorter terms more often.
+ Arrays.sort( positionalWords, new StringLengthComparator() );
+ maxSize = positionalWords.length;
+ gaussFactor = ((double)maxSize +1 ) / 4d ;
+ }
+
+ private static class StringLengthComparator implements Comparator<String>, Serializable {
+
+ public int compare(String o1, String o2) {
+ return o1.length()-o2.length();
+ }
+
+ }
+
+ public String randomWord() {
+ int position = Math.abs((int) ( r.nextGaussian() * gaussFactor ) );
+ if ( position < maxSize ) {
+ return positionalWords[position];
+ }
+ else {
+ return randomWord();
+ }
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/util/textbuilder/WordDictionary.java
___________________________________________________________________
Name: svn:keywords
+ Id
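
For illustration only, not part of this commit: a sketch of how the text-builder utilities can generate reproducible pseudo-natural text for other tests.

import org.hibernate.search.test.util.textbuilder.SentenceInventor;

public class TextBuilderUsageSketch {
	public static void main(String[] args) {
		// fixed seed keeps the output predictable for testing purposes
		// (exact text can still vary by environment, see TextProductionTest)
		SentenceInventor inventor = new SentenceInventor( 7L, 4000 );
		StringBuilder text = new StringBuilder();
		for ( int i = 0; i < 20; i++ ) {
			text.append( inventor.nextPeriod() );
		}
		// the generated text could then populate an indexed entity field
		System.out.print( text );
	}
}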
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/AsyncWorkerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/AsyncWorkerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/AsyncWorkerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,47 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.Environment;
+import org.hibernate.cfg.Configuration;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class AsyncWorkerTest extends WorkerTestCase {
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( Environment.WORKER_SCOPE, "transaction" );
+ cfg.setProperty( Environment.WORKER_EXECUTION, "async" );
+ cfg.setProperty( Environment.WORKER_PREFIX + "thread_pool.size", "1" );
+ cfg.setProperty( Environment.WORKER_PREFIX + "buffer_queue.max", "10" );
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/AsyncWorkerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/ConcurrencyTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/ConcurrencyTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/ConcurrencyTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,78 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.Session;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class ConcurrencyTest extends SearchTestCase {
+
+ public void testMultipleEntitiesInSameIndex() throws Exception {
+ Session s = openSession( );
+ s.getTransaction().begin();
+ Drink d = new Drink();
+ d.setName( "Water" );
+ Food f = new Food();
+ f.setName( "Bread" );
+ s.persist( d );
+ s.persist( f );
+ s.getTransaction().commit();
+ s.close();
+
+ s = openSession( );
+ s.getTransaction().begin();
+ d = (Drink) s.get( Drink.class, d.getId() );
+ d.setName( "Coke" );
+ f = (Food) s.get( Food.class, f.getId() );
+ f.setName( "Cake" );
+ try {
+ s.getTransaction().commit();
+ }
+ catch (Exception e) {
+ //Check for error logs from JDBCTransaction
+ }
+ s.close();
+
+ s = openSession( );
+ s.getTransaction().begin();
+ d = (Drink) s.get( Drink.class, d.getId() );
+ s.delete( d );
+ f = (Food) s.get( Food.class, f.getId() );
+ s.delete( f );
+ s.getTransaction().commit();
+ s.close();
+
+ }
+
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Drink.class,
+ Food.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/ConcurrencyTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Drink.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Drink.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Drink.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "consumable")
+public class Drink {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED )
+ private String name;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Drink.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employee.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employee.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employee.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="employee")
+public class Employee {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private long id;
+
+ @Field(index = Index.TOKENIZED )
+ private String name;
+
+
+ public long getId() {
+ return id;
+ }
+
+ public void setId(long id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employee.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employer.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employer.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employer.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.DocumentId;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index="employer")
+public class Employer {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private long id;
+
+ @Field(index = Index.TOKENIZED )
+ private String name;
+
+
+ public long getId() {
+ return id;
+ }
+
+ public void setId(long id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Employer.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Food.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Food.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Food.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,66 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.GeneratedValue;
+
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * @author Emmanuel Bernard
+ */
+@Entity
+@Indexed(index = "consumable")
+public class Food {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private Integer id;
+
+ @Field(index = Index.TOKENIZED )
+ private String name;
+
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/Food.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/SyncWorkerTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/SyncWorkerTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/SyncWorkerTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,44 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import org.hibernate.cfg.Configuration;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.Environment;
+import org.apache.lucene.analysis.StopAnalyzer;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class SyncWorkerTest extends WorkerTestCase {
+
+ protected void configure(Configuration cfg) {
+ super.configure( cfg );
+ cfg.setProperty( "hibernate.search.default.directory_provider", RAMDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ cfg.setProperty( Environment.WORKER_SCOPE, "transaction" );
+ cfg.setProperty( Environment.WORKER_PREFIX, "sync" );
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/SyncWorkerTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/WorkerTestCase.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/WorkerTestCase.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/WorkerTestCase.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,207 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker;
+
+import java.io.File;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.lucene.analysis.StopAnalyzer;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.Transaction;
+import org.hibernate.search.Environment;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.impl.FullTextSessionImpl;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.util.FileHelper;
+
+/**
+ * @author Emmanuel Bernard
+ */
+public class WorkerTestCase extends SearchTestCase {
+
+ protected void setUp() throws Exception {
+ File sub = getBaseIndexDir();
+ sub.mkdir();
+ File[] files = sub.listFiles();
+ for ( File file : files ) {
+ if ( file.isDirectory() ) {
+ FileHelper.delete( file );
+ }
+ }
+ //super.setUp(); //we need a fresh session factory each time for index set up
+ buildSessionFactory( getMappings(), getAnnotatedPackages(), getXmlFiles() );
+ }
+
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ File sub = getBaseIndexDir();
+ FileHelper.delete( sub );
+ }
+
+ public void testConcurrency() throws Exception {
+ int nThreads = 15;
+ ExecutorService es = Executors.newFixedThreadPool( nThreads );
+ Work work = new Work( getSessions() );
+ ReverseWork reverseWork = new ReverseWork( getSessions() );
+ long start = System.currentTimeMillis();
+ int iteration = 100;
+ for ( int i = 0; i < iteration; i++ ) {
+ es.execute( work );
+ es.execute( reverseWork );
+ }
+ while ( work.count < iteration - 1 ) {
+ Thread.sleep( 20 );
+ }
+ getSessions().close();
+ System.out.println( iteration + " iterations (8 tx per iteration) in " + nThreads + " threads: " + ( System
+ .currentTimeMillis() - start ) );
+ }
+
+ protected static class Work implements Runnable {
+ private SessionFactory sf;
+ public volatile int count = 0;
+
+ public Work(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ Employee ee = new Employee();
+ ee.setName( "Emmanuel" );
+ s.persist( ee );
+ Employer er = new Employer();
+ er.setName( "RH" );
+ s.persist( er );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ ee = (Employee) s.get( Employee.class, ee.getId() );
+ ee.setName( "Emmanuel2" );
+ er = (Employer) s.get( Employer.class, er.getId() );
+ er.setName( "RH2" );
+ tx.commit();
+ s.close();
+
+// try {
+// Thread.sleep( 50 );
+// }
+// catch (InterruptedException e) {
+// e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+// }
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ FullTextSession fts = new FullTextSessionImpl( s );
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "id", SearchTestCase.stopAnalyzer );
+ Query query;
+ try {
+ query = parser.parse( "name:emmanuel2" );
+ }
+ catch (ParseException e) {
+ throw new RuntimeException( e );
+ }
+ boolean results = fts.createFullTextQuery( query ).list().size() > 0;
+ // don't assert on the result: with async workers the query may run before the actual save
+ //if ( !results ) throw new RuntimeException( "No results!" );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ ee = (Employee) s.get( Employee.class, ee.getId() );
+ s.delete( ee );
+ er = (Employer) s.get( Employer.class, er.getId() );
+ s.delete( er );
+ tx.commit();
+ s.close();
+ count++;
+ }
+ }
+
+ protected static class ReverseWork implements Runnable {
+ private SessionFactory sf;
+
+ public ReverseWork(SessionFactory sf) {
+ this.sf = sf;
+ }
+
+ public void run() {
+ Session s = sf.openSession();
+ Transaction tx = s.beginTransaction();
+ Employer er = new Employer();
+ er.setName( "RH" );
+ s.persist( er );
+ Employee ee = new Employee();
+ ee.setName( "Emmanuel" );
+ s.persist( ee );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ er = (Employer) s.get( Employer.class, er.getId() );
+ er.setName( "RH2" );
+ ee = (Employee) s.get( Employee.class, ee.getId() );
+ ee.setName( "Emmanuel2" );
+ tx.commit();
+ s.close();
+
+ s = sf.openSession();
+ tx = s.beginTransaction();
+ er = (Employer) s.get( Employer.class, er.getId() );
+ s.delete( er );
+ ee = (Employee) s.get( Employee.class, ee.getId() );
+ s.delete( ee );
+ tx.commit();
+ s.close();
+ }
+ }
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ File sub = getBaseIndexDir();
+ cfg.setProperty( "hibernate.search.default.indexBase", sub.getAbsolutePath() );
+ cfg.setProperty( "hibernate.search.Clock.directory_provider", FSDirectoryProvider.class.getName() );
+ cfg.setProperty( Environment.ANALYZER_CLASS, StopAnalyzer.class.getName() );
+ }
+
+ @SuppressWarnings("unchecked")
+ protected Class<?>[] getMappings() {
+ return new Class[]{
+ Employee.class,
+ Employer.class
+ };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/WorkerTestCase.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/EmailAddress.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/EmailAddress.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/EmailAddress.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,80 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker.duplication;
+
+import java.io.Serializable;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+import org.hibernate.search.annotations.Store;
+
+/**
+ * Test entity for HSEARCH-257.
+ *
+ * @author Hardy Ferentschik
+ */
+@Entity
+public class EmailAddress implements Serializable {
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+
+ private boolean isDefaultAddress;
+
+ @Field(store = Store.YES, index = Index.NO)
+ private String address;
+
+ public EmailAddress() {
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getAddress() {
+ return address;
+ }
+
+ public void setAddress(String address) {
+ this.address = address;
+ }
+
+ public boolean isDefaultAddress() {
+ return isDefaultAddress;
+ }
+
+ public void setDefaultAddress(boolean isDefault) {
+ isDefaultAddress = isDefault;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/EmailAddress.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/Person.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/Person.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/Person.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,105 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker.duplication;
+
+import javax.persistence.DiscriminatorColumn;
+import javax.persistence.DiscriminatorType;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.Inheritance;
+import javax.persistence.InheritanceType;
+import javax.persistence.Table;
+import javax.persistence.OneToOne;
+import javax.persistence.JoinColumn;
+import javax.persistence.CascadeType;
+import javax.persistence.FetchType;
+
+import org.hibernate.search.annotations.DocumentId;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
+
+/**
+ * Test entity for HSEARCH-257.
+ *
+ * @author Marina Vatkina
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Table
+@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
+@DiscriminatorColumn(name = "DISC", discriminatorType = DiscriminatorType.STRING)
+public class Person {
+
+ @Id
+ @GeneratedValue
+ @DocumentId
+ private int id;
+
+ @Field(index = Index.TOKENIZED, name = "Content")
+ private String name;
+
+ @OneToOne(fetch = FetchType.EAGER, cascade = {
+ CascadeType.MERGE,
+ CascadeType.PERSIST
+ })
+ @JoinColumn(name = "DEFAULT_EMAILADDRESS_FK")
+ private EmailAddress defaultEmailAddress;
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * This function return the value of defaultEmailAddress.
+ *
+ * @return the defaultEmailAddress
+ */
+
+ public EmailAddress getDefaultEmailAddress() {
+ return defaultEmailAddress;
+ }
+
+ /**
+ * This function sets the value of the defaultEmailAddress.
+ *
+ * @param defaultEmailAddress the defaultEmailAddress to set
+ */
+ protected void setDefaultEmailAddress(EmailAddress defaultEmailAddress) {
+ this.defaultEmailAddress = defaultEmailAddress;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/Person.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/SpecialPerson.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/SpecialPerson.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/SpecialPerson.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,150 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker.duplication;
+
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.CascadeType;
+import javax.persistence.DiscriminatorValue;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.OneToMany;
+
+import org.hibernate.annotations.Cascade;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+
+/**
+ * Test entity for HSEARCH-257.
+ *
+ * @author Hardy Ferentschik
+ */
+@Entity
+@Indexed
+@DiscriminatorValue("SpecialPerson")
+public class SpecialPerson extends Person {
+
+ @OneToMany(fetch = FetchType.EAGER, cascade = { CascadeType.ALL })
+ @Cascade(org.hibernate.annotations.CascadeType.DELETE_ORPHAN)
+ @JoinColumn(name = "SPECIALPERSON_FK")
+ @IndexedEmbedded
+ private Set<EmailAddress> emailAddressSet = new HashSet<EmailAddress>();
+
+ public Set<EmailAddress> getEmailAddressSet() {
+ return emailAddressSet;
+ }
+
+ public void setEmailAddressSet(Set<EmailAddress> emailAddresses) {
+ EmailAddress defaultVal = getDefaultEmailAddressFromList( emailAddresses );
+
+ super.setDefaultEmailAddress( defaultVal );
+
+ emailAddressSet = emailAddresses;
+ }
+
+ /**
+ * This function adds the provided emailAddress to the existing set.
+ *
+ * @param emailAddress EmailAddress to add to the set
+ */
+ public void addEmailAddress(EmailAddress emailAddress) {
+ if ( emailAddress != null ) {
+ if ( emailAddressSet == null ) {
+ emailAddressSet = new HashSet<EmailAddress>();
+ }
+
+ // We cannot add another default address to the list. Check if
+ // default
+ // address has been set before.
+ if ( emailAddress.isDefaultAddress() ) {
+ // Replace old default address with new one.
+ processDefaultEmailAddress( emailAddress, emailAddressSet );
+
+ super.setDefaultEmailAddress( emailAddress );
+ }
+ else {
+ emailAddressSet.add( emailAddress );
+ }
+ }
+ }
+
+ private void processDefaultEmailAddress(EmailAddress defaultVal,
+ Set<EmailAddress> list) {
+ if ( defaultVal != null ) {
+ boolean addToList = true;
+
+ for ( EmailAddress aList : list ) {
+
+ if ( defaultVal.equals( aList ) ) {
+ aList.setDefaultAddress( true );
+ addToList = false;
+ }
+ else if ( aList.isDefaultAddress() ) {
+ // Reset default value.
+ aList.setDefaultAddress( false );
+ }
+ }
+
+ // Add Email Address to the list if list does not contain it.
+ if ( addToList ) {
+ list.add( defaultVal );
+ }
+ }
+ }
+
+ private EmailAddress getDefaultEmailAddressFromList(
+ Set<EmailAddress> list) {
+ EmailAddress address = null;
+ EmailAddress firstAddressInList = null;
+ boolean found = false;
+
+ if ( list != null ) {
+ for ( EmailAddress aList : list ) {
+ address = aList;
+
+ if ( address != null ) {
+ if ( firstAddressInList == null ) {
+ firstAddressInList = address;
+ }
+
+ if ( address.isDefaultAddress() ) {
+ found = true;
+ break;
+ }
+ }
+ }
+
+ if ( !found && firstAddressInList != null ) {
+ // If default address was not found we set the first one as
+ // default.
+ firstAddressInList.setDefaultAddress( true );
+ address = firstAddressInList;
+ }
+ }
+
+ return address;
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/SpecialPerson.java
___________________________________________________________________
Name: svn:keywords
+ Id
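The default-address bookkeeping above is easiest to see from the calling side. The sketch below only exercises the public API added in this hunk; the EmailAddress entity itself is not part of this diff, so its setters are assumed from the way they are used in the tests further down, and the names and values are purely illustrative.

package org.hibernate.search.test.worker.duplication;

// Illustrative usage sketch, not part of the commit.
public class SpecialPersonUsageSketch {
    public static void main(String[] args) {
        SpecialPerson person = new SpecialPerson();
        person.setName( "Joe Smith" );

        EmailAddress work = new EmailAddress();   // setters assumed from their use in the tests below
        work.setAddress( "work@example.com" );
        work.setDefaultAddress( true );           // flagged as the default address

        EmailAddress home = new EmailAddress();
        home.setAddress( "home@example.com" );    // not flagged

        person.addEmailAddress( work );  // default flag -> promoted and mirrored onto Person
        person.addEmailAddress( home );  // plain address -> simply added to the set

        // Expected outcome given the logic above: two addresses in the set, "work" is the default.
        System.out.println( person.getEmailAddressSet().size() );                  // 2
        System.out.println( person.getDefaultEmailAddress().isDefaultAddress() );  // true
    }
}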
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,158 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker.duplication;
+
+import java.util.List;
+import java.util.ArrayList;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TopDocs;
+import org.hibernate.Transaction;
+import org.hibernate.search.FullTextQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.backend.WorkType;
+import org.hibernate.search.backend.LuceneWork;
+import org.hibernate.search.backend.AddLuceneWork;
+import org.hibernate.search.backend.DeleteLuceneWork;
+import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
+import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Testcase for HSEARCH-257.
+ */
+public class WorkDuplicationTest extends SearchTestCase {
+
+ /**
+ * This test assures that HSEARCH-257 is fixed. Before the fix Search would issue another <code>AddLuceneWork</code>
+ * after the <code>DeleteLuceneWork</code>, which left a stale Lucene document in the index even after the
+ * deletion.
+ *
+ * @throws Exception in case the test fails.
+ */
+ public void testNoWorkDuplication() throws Exception {
+
+ FullTextSession s = org.hibernate.search.Search.getFullTextSession( openSession() );
+ Transaction tx = s.beginTransaction();
+
+ // create new customer
+ SpecialPerson person = new SpecialPerson();
+ person.setName( "Joe Smith" );
+
+ EmailAddress emailAddress = new EmailAddress();
+ emailAddress.setAddress( "foo@foobar.com" );
+ emailAddress.setDefaultAddress(true);
+
+ person.addEmailAddress( emailAddress );
+
+ // persist the customer
+ s.persist( person );
+ tx.commit();
+
+ // search if the record made it into the index
+ tx = s.beginTransaction();
+ String searchQuery = "Joe";
+ QueryParser parser = new QueryParser( getTargetLuceneVersion(), "Content", SearchTestCase.standardAnalyzer );
+ Query luceneQuery = parser.parse( searchQuery );
+ FullTextQuery query = s.createFullTextQuery( luceneQuery );
+ List results = query.list();
+ assertTrue( "We should have a hit", results.size() == 1 );
+ tx.commit();
+
+ // Now try to delete
+ tx = s.beginTransaction();
+ int id = person.getId();
+ person = ( SpecialPerson ) s.get( SpecialPerson.class, id );
+ s.delete( person );
+ tx.commit();
+
+ // Search for the record via Lucene directly
+ tx = s.beginTransaction();
+
+ DirectoryProvider directoryProvider = s.getSearchFactory().getDirectoryProviders( SpecialPerson.class )[0];
+ ReaderProvider readerProvider = s.getSearchFactory().getReaderProvider();
+ IndexReader reader = readerProvider.openReader( directoryProvider );
+ IndexSearcher searcher = new IndexSearcher( reader );
+
+ try {
+ // we have to test using Lucene directly since query loaders will ignore hits for which there is no
+ // database entry
+ TopDocs topDocs = searcher.search( luceneQuery, null, 1 );
+ assertTrue( "We should have no hit", topDocs.totalHits == 0 );
+ }
+ finally {
+ readerProvider.closeReader( reader );
+ }
+ tx.commit();
+ s.close();
+ }
+
+ /**
+ * Tests that adding and deleting the same entity only results into a single delete in the work queue.
+ * See HSEARCH-293.
+ *
+ * @throws Exception in case the test fails.
+ */
+ @SuppressWarnings( "unchecked" )
+ public void testAddWorkGetReplacedByDeleteWork() throws Exception {
+ FullTextSession fullTextSession = org.hibernate.search.Search.getFullTextSession( openSession() );
+ SearchFactoryImpl searchFactory = ( SearchFactoryImpl ) fullTextSession.getSearchFactory();
+ DocumentBuilderIndexedEntity builder = searchFactory.getDocumentBuilderIndexedEntity( SpecialPerson.class );
+
+ // create test entity
+ SpecialPerson person = new SpecialPerson();
+ person.setName( "Joe Smith" );
+
+ EmailAddress emailAddress = new EmailAddress();
+ emailAddress.setAddress( "foo@foobar.com" );
+ emailAddress.setDefaultAddress(true);
+
+ person.addEmailAddress( emailAddress );
+
+ List<LuceneWork> queue = new ArrayList<LuceneWork>();
+
+ builder.addWorkToQueue( SpecialPerson.class, person, 1, WorkType.ADD, queue, searchFactory );
+
+ assertEquals("There should only be one job in the queue", 1, queue.size());
+ assertTrue("Wrong job type", queue.get(0) instanceof AddLuceneWork );
+
+ builder.addWorkToQueue( SpecialPerson.class, person, 1, WorkType.DELETE, queue, searchFactory );
+
+ assertEquals("There should only be one job in the queue", 1, queue.size());
+ assertTrue("Wrong job type. Add job should have been replaced by delete.", queue.get(0) instanceof DeleteLuceneWork );
+
+ fullTextSession.close();
+ }
+
+
+ protected Class<?>[] getMappings() {
+ return new Class[] { Person.class, EmailAddress.class, SpecialPerson.class };
+ }
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkDuplicationTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
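For readers skimming the diff, the behaviour exercised by testAddWorkGetReplacedByDeleteWork boils down to: a DELETE queued for an entity supersedes an ADD already queued for the same entity. The standalone sketch below illustrates that rule with hypothetical types; it is not the actual Hibernate Search queue code, which operates on the LuceneWork hierarchy used above.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Hypothetical illustration only; the real work items are LuceneWork instances.
public class QueueCoalescingSketch {

    enum Kind { ADD, DELETE }

    static class Work {
        final Class<?> entityType;
        final Object id;
        final Kind kind;

        Work(Class<?> entityType, Object id, Kind kind) {
            this.entityType = entityType;
            this.id = id;
            this.kind = kind;
        }
    }

    static void enqueue(List<Work> queue, Work incoming) {
        if ( incoming.kind == Kind.DELETE ) {
            // Drop any pending ADD for the same entity: executing the ADD after the DELETE
            // would leave a stale document in the index (the HSEARCH-257 symptom).
            for ( Iterator<Work> it = queue.iterator(); it.hasNext(); ) {
                Work pending = it.next();
                if ( pending.kind == Kind.ADD
                        && pending.entityType == incoming.entityType
                        && pending.id.equals( incoming.id ) ) {
                    it.remove();
                }
            }
        }
        queue.add( incoming );
    }

    public static void main(String[] args) {
        List<Work> queue = new ArrayList<Work>();
        enqueue( queue, new Work( Object.class, Integer.valueOf( 1 ), Kind.ADD ) );
        enqueue( queue, new Work( Object.class, Integer.valueOf( 1 ), Kind.DELETE ) );
        // Only the DELETE remains, mirroring testAddWorkGetReplacedByDeleteWork above.
        System.out.println( queue.size() + " work item(s) left, kind=" + queue.get( 0 ).kind );
    }
}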
Added: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkSequencesTest.java
===================================================================
--- search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkSequencesTest.java (rev 0)
+++ search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkSequencesTest.java 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,148 @@
+/* $Id$
+ *
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+ */
+package org.hibernate.search.test.worker.duplication;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.hibernate.Session;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.SearchFactory;
+import org.hibernate.search.reader.ReaderProvider;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.test.SearchTestCase;
+import org.hibernate.search.test.session.Domain;
+
+/**
+ * Testcase for HSEARCH-353
+ * Verifies that different kinds of work (add/delete) found in the same
+ * queue are all executed, taking special care with different entities
+ * being deleted/persisted but sharing the same PK (to replace the old
+ * instance with another one).
+ *
+ * @author Sanne Grinovero
+ */
+public class WorkSequencesTest extends SearchTestCase {
+
+ private SearchFactory searchFactory;
+
+ public void testComplexTransactionSequence() throws IOException {
+ Session classicSession = openSession( );
+ FullTextSession session = Search.getFullTextSession( classicSession );
+ searchFactory = session.getSearchFactory();
+
+ // create some different domains:
+ {
+ session.beginTransaction();
+ session.persist( new Domain( 1, "jboss.org" ) );
+ session.persist( new Domain( 2, "jboss.com" ) );
+ session.persist( new Domain( 3, "hibernate.org" ) );
+ session.persist( new Domain( 4, "geocities.com" ) );
+ session.getTransaction().commit();
+ }
+ assertEquals( 2, countDomainsByFullText( "jboss" ) );
+ assertEquals( 1, countDomainsByFullText( "hibernate" ) );
+ assertEquals( 1, countDomainsByFullText( "geocities" ) );
+
+ // now create some and delete others:
+ {
+ session.beginTransaction();
+ session.persist( new Domain( 5, "sun.com" ) );
+ session.persist( new Domain( 6, "mysql.com" ) );
+ session.persist( new Domain( 7, "oracle.com" ) );
+ Domain hibernateDomain = (Domain) session.get( Domain.class, 3 );
+ session.delete( hibernateDomain );
+ Domain geocitiesDomain = (Domain) session.get( Domain.class, 4 );
+ session.delete( geocitiesDomain );
+ session.getTransaction().commit();
+ }
+ assertEquals( 0, countDomainsByFullText( "hibernate" ) );
+ assertEquals( 0, countDomainsByFullText( "geocities" ) );
+ assertEquals( 2, countDomainsByFullText( "jboss" ) );
+ assertEquals( 1, countDomainsByFullText( "sun" ) );
+ assertEquals( 1, countDomainsByFullText( "mysql" ) );
+ assertEquals( 1, countDomainsByFullText( "oracle" ) );
+
+ // use create/update/delete:
+ {
+ session.beginTransaction();
+ session.persist( new Domain( 3, "hibernate.org" ) );
+ Domain mysqlDomain = (Domain) session.get( Domain.class, 6 );
+ session.delete( mysqlDomain );
+ //persisting a new entity having the same PK as a deleted one:
+ session.persist( new Domain( 6, "myhql.org" ) );
+ Domain sunDomain = (Domain) session.get( Domain.class, 5 );
+ sunDomain.setName( "community.oracle.com" );
+ session.getTransaction().commit();
+ }
+ assertEquals( 1, countDomainsByFullText( "hibernate" ) );
+ assertEquals( 2, countDomainsByFullText( "oracle" ) );
+ assertEquals( 1, countDomainsByFullText( "myhql" ) );
+ assertEquals( 1, countDomainsByFullText( "community" ) );
+ assertEquals( 0, countDomainsByFullText( "mysql" ) );
+
+ // now creating and deleting the "same" (as by pk) entity several times in same transaction:
+ {
+ session.beginTransaction();
+ session.persist( new Domain( 8, "mysql.org" ) );
+ Domain mysqlDomain = (Domain) session.load( Domain.class, 8 );
+ session.delete( mysqlDomain );
+ Domain newDomain = new Domain( 8, "something.org" );
+ session.persist( newDomain );
+ session.delete( newDomain );
+ session.persist( new Domain( 8, "somethingnew.org" ) );
+ session.getTransaction().commit();
+ }
+ assertEquals( 1, countDomainsByFullText( "somethingnew" ) );
+
+ session.close();
+ }
+
+ //helper method to verify how many instances are found in the index by doing a simple FT query
+ private int countDomainsByFullText(String name) throws IOException {
+ Query luceneQuery = new TermQuery( new Term( "name", name ) );
+ DirectoryProvider<?> directoryProvider = searchFactory.getDirectoryProviders( Domain.class )[0];
+ ReaderProvider readerProvider = searchFactory.getReaderProvider();
+ IndexReader reader = readerProvider.openReader( directoryProvider );
+ IndexSearcher searcher = new IndexSearcher( reader );
+ TopDocs topDocs = searcher.search( luceneQuery, null, 100 );
+ readerProvider.closeReader( reader );
+ return topDocs.totalHits;
+ }
+
+ @Override
+ protected Class<?>[] getMappings() {
+ return new Class[] {
+ Domain.class
+ };
+ }
+
+}
Property changes on: search/trunk/hibernate-search/src/test/java/org/hibernate/search/test/worker/duplication/WorkSequencesTest.java
___________________________________________________________________
Name: svn:keywords
+ Id
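One small robustness note on the countDomainsByFullText helper above: the reader is handed back to the ReaderProvider outside of any finally block and the IndexSearcher is never closed, so an exception thrown by search() would leak the reader. A more defensive drop-in variant of the same helper, using only the classes this test already imports, could look like the following sketch (not part of the commit):

    private int countDomainsByFullText(String name) throws IOException {
        Query luceneQuery = new TermQuery( new Term( "name", name ) );
        DirectoryProvider<?> directoryProvider = searchFactory.getDirectoryProviders( Domain.class )[0];
        ReaderProvider readerProvider = searchFactory.getReaderProvider();
        IndexReader reader = readerProvider.openReader( directoryProvider );
        try {
            IndexSearcher searcher = new IndexSearcher( reader );
            try {
                TopDocs topDocs = searcher.search( luceneQuery, null, 100 );
                return topDocs.totalHits;
            }
            finally {
                searcher.close();   // release the searcher even if search() throws
            }
        }
        finally {
            readerProvider.closeReader( reader );   // always hand the reader back to the provider
        }
    }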
Property changes on: search/trunk/hibernate-search/src/test/resources
___________________________________________________________________
Name: svn:mergeinfo
+
Added: search/trunk/hibernate-search/src/test/resources/hibernate.properties
===================================================================
--- search/trunk/hibernate-search/src/test/resources/hibernate.properties (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/hibernate.properties 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,31 @@
+################################################################################
+# Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved. #
+# #
+# This copyrighted material is made available to anyone wishing to use, modify,#
+# copy, or redistribute it subject to the terms and conditions of the GNU #
+# Lesser General Public License, v. 2.1. This program is distributed in the #
+# hope that it will be useful, but WITHOUT A WARRANTY; without even the implied#
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
+# Lesser General Public License for more details. You should have received a #
+# copy of the GNU Lesser General Public License, v.2.1 along with this #
+# distribution; if not, write to the Free Software Foundation, Inc., #
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. #
+# #
+# Red Hat Author(s): Steve Ebersole #
+################################################################################
+hibernate.dialect ${db.dialect}
+hibernate.connection.driver_class ${jdbc.driver}
+hibernate.connection.url ${jdbc.url}
+hibernate.connection.username ${jdbc.user}
+hibernate.connection.password ${jdbc.pass}
+hibernate.connection.isolation ${jdbc.isolation}
+
+hibernate.connection.pool_size 5
+
+hibernate.show_sql true
+hibernate.format_sql true
+
+hibernate.max_fetch_depth 5
+
+hibernate.cache.region_prefix hibernate.test
+hibernate.cache.provider_class org.hibernate.cache.HashtableCacheProvider
Added: search/trunk/hibernate-search/src/test/resources/jndi.properties
===================================================================
--- search/trunk/hibernate-search/src/test/resources/jndi.properties (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/jndi.properties 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,12 @@
+java.naming.factory.initial=org.apache.activemq.jndi.ActiveMQInitialContextFactory
+java.naming.provider.url=vm://localhost
+
+# use the following property to specify the JNDI name the connection factory
+# should appear as.
+connectionFactoryNames = ConnectionFactory, java:/ConnectionFactory
+
+# register some queues in JNDI using the form
+# queue.[jndiName] = [physicalName]
+queue.queue/searchtest = searchQueue
+
+
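With this jndi.properties on the test classpath, ActiveMQ's initial context factory materialises the listed names against an embedded vm://localhost broker, so plain JNDI lookups resolve them. A minimal sketch of such a lookup follows (illustrative only, assuming activemq-core is on the classpath as in the test dependencies):

import javax.jms.ConnectionFactory;
import javax.jms.Queue;
import javax.naming.InitialContext;
import javax.naming.NamingException;

public class JndiLookupSketch {
    public static void main(String[] args) throws NamingException {
        // No environment passed: jndi.properties is picked up from the classpath.
        InitialContext ctx = new InitialContext();

        // Names come straight from the properties file above.
        ConnectionFactory factory = (ConnectionFactory) ctx.lookup( "ConnectionFactory" );
        Queue searchQueue = (Queue) ctx.lookup( "queue/searchtest" );

        System.out.println( factory + " -> " + searchQueue );
        ctx.close();
    }
}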
Added: search/trunk/hibernate-search/src/test/resources/log4j.properties
===================================================================
--- search/trunk/hibernate-search/src/test/resources/log4j.properties (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/log4j.properties 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,54 @@
+### direct log messages to stdout ###
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+
+### direct messages to file hibernate.log ###
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.File=hibernate.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+
+### direct messages to socket - chainsaw ###
+log4j.appender.socket=org.apache.log4j.net.SocketAppender
+log4j.appender.socket.remoteHost=localhost
+log4j.appender.socket.port=4560
+log4j.appender.socket.locationInfo=true
+
+### set log levels - for more verbose logging change 'info' to 'debug' ###
+
+log4j.rootLogger=warn, stdout
+log4j.logger.org.jboss=info
+#log4j.logger.com.jboss=debug
+
+log4j.logger.org.hibernate=info
+
+#log4j.logger.org.hibernate.search=debug
+
+
+### log just the SQL
+#log4j.logger.org.hibernate.SQL=debug
+
+#log4j.logger.org.hibernate.engine.CascadingAction=debug
+
+### log JDBC bind parameters ###
+#log4j.logger.org.hibernate.type=debug
+
+### log schema export/update ###
+log4j.logger.org.hibernate.tool.hbm2ddl=warn
+
+### log cache activity ###
+#log4j.logger.org.hibernate.cache=debug
+
+### enable the following line if you want to track down connection ###
+### leakages when using DriverManagerConnectionProvider ###
+#log4j.logger.org.hibernate.connection.DriverManagerConnectionProvider=trace
+
+### annotation logs
+#log4j.logger.org.hibernate.annotation=info
+#log4j.logger.org.hibernate.cfg=info
+#log4j.logger.org.hibernate.cfg.SettingsFactory=info
+#log4j.logger.org.hibernate.cfg.AnnotationBinder=info
+#log4j.logger.org.hibernate.cfg.AnnotationConfiguration=info
+#log4j.logger.org.hibernate.cfg.Ejb3Column=info
Added: search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/stoplist.properties
===================================================================
--- search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/stoplist.properties (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/stoplist.properties 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,33 @@
+a
+an
+and
+are
+as
+at
+be
+but
+by
+for
+if
+in
+into
+is
+it
+no
+not
+of
+on
+or
+such
+that
+the
+their
+then
+there
+these
+they
+this
+to
+was
+will
+with
\ No newline at end of file
Added: search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/synonyms.properties
===================================================================
--- search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/synonyms.properties (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/analyzer/solr/synonyms.properties 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,2 @@
+ipod, i-pod
+universe , cosmos
Added: search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/Animal.hbm.xml
===================================================================
--- search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/Animal.hbm.xml (rev 0)
+++ search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/Animal.hbm.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<!DOCTYPE hibernate-mapping PUBLIC
+ "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
+ "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
+
+<hibernate-mapping package="org.hibernate.search.test.classloading">
+ <class name="Animal">
+ <id name="id" type="java.lang.Long">
+ <generator class="increment"/>
+ </id>
+ <property name="name"/>
+ </class>
+</hibernate-mapping>
\ No newline at end of file
Property changes on: search/trunk/hibernate-search/src/test/resources/org/hibernate/search/test/classloading/Animal.hbm.xml
___________________________________________________________________
Name: svn:mergeinfo
+
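The Animal class referenced by this mapping is not included in the hunks shown here. For orientation, a POJO compatible with the mapping would look roughly like the sketch below; only the id and name properties are taken from the mapping itself, everything else is an assumption.

package org.hibernate.search.test.classloading;

// Minimal sketch matching Animal.hbm.xml: a Long id (generator "increment") and a name property.
public class Animal {
    private Long id;
    private String name;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
}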
Modified: search/trunk/hibernate-search-archetype/pom.xml
===================================================================
--- search/trunk/hibernate-search-archetype/pom.xml 2010-03-15 20:36:48 UTC (rev 19001)
+++ search/trunk/hibernate-search-archetype/pom.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -6,7 +6,7 @@
<artifactId>hibernate-search-quickstart</artifactId>
<packaging>jar</packaging>
<version>3.2.0-SNAPSHOT</version>
- <name>A custom project</name>
+ <name>Hibernate Search Archetype</name>
<url>http://www.myorganization.org</url>
<properties>
Modified: search/trunk/pom.xml
===================================================================
--- search/trunk/pom.xml 2010-03-15 20:36:48 UTC (rev 19001)
+++ search/trunk/pom.xml 2010-03-16 01:28:07 UTC (rev 19002)
@@ -1,52 +1,61 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
- * Hibernate, Relational Persistence for Idiomatic Java
- *
- * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
- * indicated by the @author tags or express copyright attribution
- * statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat, Inc.
- *
- * This copyrighted material is made available to anyone wishing to use, modify,
- * copy, or redistribute it subject to the terms and conditions of the GNU
- * Lesser General Public License, as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
- * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
- * for more details.
- *
- * You should have received a copy of the GNU Lesser General Public License
- * along with this distribution; if not, write to:
- * Free Software Foundation, Inc.
- * 51 Franklin Street, Fifth Floor
- * Boston, MA 02110-1301 USA
- -->
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * Copyright (c) 2009, Red Hat, Inc. and/or its affiliates or third-party contributors as
+ * indicated by the @author tags or express copyright attribution
+ * statements applied by the authors. All third-party contributions are
+ * distributed under license by Red Hat, Inc.
+ *
+ * This copyrighted material is made available to anyone wishing to use, modify,
+ * copy, or redistribute it subject to the terms and conditions of the GNU
+ * Lesser General Public License, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this distribution; if not, write to:
+ * Free Software Foundation, Inc.
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301 USA
+-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
+
<groupId>org.hibernate</groupId>
- <artifactId>hibernate-search</artifactId>
+ <artifactId>hibernate-search-parent</artifactId>
<version>3.2.0-SNAPSHOT</version>
- <name>Hibernate Search</name>
- <description>Hibernate Search</description>
+ <packaging>pom</packaging>
+
+ <name>Hibernate Search Parent</name>
+ <description>Hibernate Search Parent POM</description>
<url>http://search.hibernate.org</url>
-
+
+ <modules>
+ <module>hibernate-search</module>
+ <module>hibernate-search-archetype</module>
+ </modules>
+
<issueManagement>
<system>JIRA</system>
<url>http://opensource.atlassian.com/projects/hibernate/browse/HSEARCH</url>
</issueManagement>
+
<scm>
<connection>scm:svn:http://anonsvn.jboss.org/repos/hibernate/search/trunk</connection>
<developerConnection>scm:svn:https://svn.jboss.org/repos/hibernate/search/trunk</developerConnection>
<url>http://fisheye.jboss.com/browse/Hibernate/search/trunk</url>
</scm>
-
+
<organization>
<name>Hibernate</name>
<url>http://www.hibernate.org</url>
</organization>
-
+
<licenses>
<license>
<name>GNU Lesser General Public License</name>
@@ -54,7 +63,7 @@
<comments>See discussion at http://hibernate.org/356.html for more details.</comments>
</license>
</licenses>
-
+
<ciManagement>
<system>Hudson</system>
<url>http://hudson.jboss.org/hudson/view/hibernate/job/hibernate-search-trunk/</url>
@@ -78,7 +87,7 @@
<url>http://in.relation.to/Bloggers/Sanne</url>
</developer>
</developers>
-
+
<mailingLists>
<mailingList>
<name>Hibernate Announcements</name>
@@ -112,7 +121,7 @@
<archive>http://lists.jboss.org/pipermail/hibernate-issues/</archive>
</mailingList>
</mailingLists>
-
+
<properties>
<slf4jVersion>1.5.8</slf4jVersion>
<luceneVersion>2.9.2</luceneVersion>
@@ -120,58 +129,14 @@
<hibernateCommonsAnnotationVersion>3.2.0.Final</hibernateCommonsAnnotationVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
-
+
<dependencies>
- <!-- =============================== -->
- <!-- Required Dependencies -->
- <!-- =============================== -->
<dependency>
- <groupId>org.hibernate</groupId>
- <artifactId>hibernate-core</artifactId>
- <version>${hibernateVersion}</version>
- </dependency>
- <dependency>
- <groupId>org.hibernate</groupId>
- <artifactId>hibernate-commons-annotations</artifactId>
- <version>${hibernateCommonsAnnotationVersion}</version>
- </dependency>
- <dependency>
- <groupId>org.hibernate.java-persistence</groupId>
- <artifactId>jpa-api</artifactId>
- <version>2.0-cr-1</version>
- </dependency>
- <dependency>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-core</artifactId>
- <version>${luceneVersion}</version>
- </dependency>
- <dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4jVersion}</version>
</dependency>
<dependency>
- <groupId>javax.transaction</groupId>
- <artifactId>jta</artifactId>
- <version>1.1</version>
- </dependency>
-
- <!-- =============================== -->
- <!-- Testing Dependencies -->
- <!-- =============================== -->
- <dependency>
- <groupId>org.apache.activemq</groupId>
- <artifactId>activemq-core</artifactId>
- <version>5.2.0</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
@@ -183,15 +148,144 @@
<version>${slf4jVersion}</version>
<scope>test</scope>
</dependency>
- </dependencies>
-
+ </dependencies>
+
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-core</artifactId>
+ <version>${hibernateVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-commons-annotations</artifactId>
+ <version>${hibernateCommonsAnnotationVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate.java-persistence</groupId>
+ <artifactId>jpa-api</artifactId>
+ <version>2.0-cr-1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-core</artifactId>
+ <version>${luceneVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.transaction</groupId>
+ <artifactId>jta</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.activemq</groupId>
+ <artifactId>activemq-core</artifactId>
+ <version>5.2.0</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-annotations</artifactId>
+ <version>${hibernateVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-entitymanager</artifactId>
+ <version>${hibernateVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-common</artifactId>
+ <version>1.3.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-core</artifactId>
+ <version>1.3.0</version>
+ <optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-solrj</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>woodstox</groupId>
+ <artifactId>wstx-asl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>net.java.dev.stax-utils</groupId>
+ <artifactId>stax-utils</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.solr</groupId>
+ <artifactId>solr-lucene-core</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-snowball</artifactId>
+ <version>${luceneVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analyzers</artifactId>
+ <version>${luceneVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>1.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>1.3.2</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.jms</groupId>
+ <artifactId>jms</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>jgroups</groupId>
+ <artifactId>jgroups</artifactId>
+ <version>2.6.7.GA</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.annotation</groupId>
+ <artifactId>jsr250-api</artifactId>
+ <version>1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>javassist</groupId>
+ <artifactId>javassist</artifactId>
+ <version>3.4.GA</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+
<build>
<defaultGoal>test</defaultGoal>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
- <version>2.0.2</version>
+ <version>2.1</version>
<configuration>
<source>1.5</source>
<target>1.5</target>
@@ -212,127 +306,9 @@
</manifestEntries>
</archive>
</configuration>
- <executions>
- <execution>
- <id>build-test-jar</id>
- <goals>
- <goal>test-jar</goal>
- </goals>
- <configuration>
- <archive>
- <manifestEntries>
- <Implementation-Title>${name} testsuite</Implementation-Title>
- <Implementation-Version>${version}</Implementation-Version>
- <Implementation-Vendor>hibernate.org</Implementation-Vendor>
- <Implementation-Vendor-Id>hibernate.org</Implementation-Vendor-Id>
- <Implementation-URL>http://search.hibernate.org</Implementation-URL>
- </manifestEntries>
- </archive>
- </configuration>
- </execution>
- </executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.4.3</version>
- <configuration>
- <forkMode>once</forkMode>
- <redirectTestOutputToFile>true</redirectTestOutputToFile>
- <systemProperties>
- <property>
- <name>build.dir</name>
- <value>${basedir}/target</value>
- </property>
- <!--
- Following is the default jgroups mcast address. If you find the testsuite runs very slowly,
- there may be problems with multicast on the interface JGroups uses by default on
- your machine. You can try to resolve setting 'jgroups.bind_addr' as a system-property
- to the jvm launching maven and setting the value to an interface where you know multicast works
- -->
- <property>
- <name>jgroups.bind_addr</name>
- <value>127.0.0.1</value>
- </property>
- <!-- There are problems with multicast and IPv6 on some OS/JDK combos, so we tell Java
- to use IPv4. If you have problems with multicast when running the tests you can
- try setting this to 'false', although typically that won't be helpful.
- -->
- <property>
- <name>java.net.preferIPv4Stack</name>
- <value>true</value>
- </property>
- </systemProperties>
- <excludes>
- <exclude>**/*.java</exclude>
- </excludes>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jboss.maven.plugins</groupId>
- <artifactId>maven-jdocbook-plugin</artifactId>
- <version>2.2.0</version>
- <extensions>true</extensions>
- <dependencies>
- <dependency>
- <groupId>org.hibernate</groupId>
- <artifactId>hibernate-jdocbook-style</artifactId>
- <version>2.0.0</version>
- <type>jdocbook-style</type>
- </dependency>
- </dependencies>
- <configuration>
- <sourceDocumentName>master.xml</sourceDocumentName>
- <sourceDirectory>${basedir}/src/main/docbook</sourceDirectory>
- <masterTranslation>en-US</masterTranslation>
- <translations>
- <translation>zh-CN</translation>
- </translations>
- <imageResource>
- <directory>${basedir}/src/main/docbook/en-US/images</directory>
- </imageResource>
- <formats>
- <format>
- <formatName>pdf</formatName>
- <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/pdf.xsl</stylesheetResource>
- <finalName>hibernate_reference.pdf</finalName>
- </format>
- <format>
- <formatName>html_single</formatName>
- <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/xhtml-single.xsl
- </stylesheetResource>
- <finalName>index.html</finalName>
- </format>
- <format>
- <formatName>html</formatName>
- <stylesheetResource>classpath:/xslt/org/hibernate/jdocbook/xslt/xhtml.xsl
- </stylesheetResource>
- <finalName>index.html</finalName>
- </format>
- </formats>
- <options>
- <xincludeSupported>true</xincludeSupported>
- <xmlTransformerType>saxon</xmlTransformerType>
- <!-- needed for uri-resolvers; can be ommitted if using 'current' uri scheme -->
- <!-- could also locate the docbook dependency and inspect its version... -->
- <docbookVersion>1.72.0</docbookVersion>
- <localeSeparator>-</localeSeparator>
- </options>
- </configuration>
- <executions>
- <execution>
- <id>make-doc</id>
- <phase>site</phase>
- <goals>
- <goal>translate</goal>
- <goal>resources</goal>
- <goal>generate</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.1</version>
<executions>
@@ -349,9 +325,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>2.5</version>
- <configuration>
- <stylesheetfile>${basedir}/src/main/javadoc/jdstyle.css</stylesheetfile>
- </configuration>
<executions>
<execution>
<id>make-javadoc</id>
@@ -362,66 +335,77 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.2-beta-2</version>
- <configuration>
- <descriptors>
- <descriptor>src/main/assembly/dist.xml</descriptor>
- </descriptors>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-release-plugin</artifactId>
- <version>2.0-beta-9</version>
- <configuration>
- <releaseProfiles>release</releaseProfiles>
- <goals>package javadoc:javadoc org.jboss.maven.plugins:maven-jdocbook-plugin:2.2.0:resources
- org.jboss.maven.plugins:maven-jdocbook-plugin:2.2.0:generate assembly:assembly
- </goals>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jboss.maven.plugins</groupId>
- <artifactId>maven-injection-plugin</artifactId>
- <version>1.0.2</version>
- <executions>
- <execution>
- <phase>compile</phase>
- <goals>
- <goal>bytecode</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <bytecodeInjections>
- <bytecodeInjection>
- <expression>${pom.version}</expression>
- <targetMembers>
- <methodBodyReturn>
- <className>org.hibernate.search.Version</className>
- <methodName>getVersionString</methodName>
- </methodBodyReturn>
- </targetMembers>
- </bytecodeInjection>
- </bytecodeInjections>
- </configuration>
- </plugin>
</plugins>
- <testResources>
- <testResource>
- <filtering>true</filtering>
- <directory>src/test/resources</directory>
- <includes>
- <include>**/*.properties</include>
- <include>**/*.xml</include>
- </includes>
- </testResource>
- </testResources>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.4.3</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.maven.plugins</groupId>
+ <artifactId>maven-jdocbook-plugin</artifactId>
+ <version>2.2.0</version>
+ <extensions>true</extensions>
+ <dependencies>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-jdocbook-style</artifactId>
+ <version>2.0.0</version>
+ <type>jdocbook-style</type>
+ </dependency>
+ </dependencies>
+ <executions>
+ <execution>
+ <id>make-doc</id>
+ <phase>site</phase>
+ <goals>
+ <goal>translate</goal>
+ <goal>resources</goal>
+ <goal>generate</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-release-plugin</artifactId>
+ <version>2.0-beta-9</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jboss.maven.plugins</groupId>
+ <artifactId>maven-injection-plugin</artifactId>
+ <version>1.0.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>2.5</version>
+ <executions>
+ <execution>
+ <id>make-javadoc</id>
+ <phase>package</phase>
+ <goals>
+ <goal>javadoc</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
-
+
<distributionManagement>
<repository>
<!-- Copy the dist to the local checkout of the JBoss maven2 repo ${maven.repository.root} -->
@@ -436,7 +420,7 @@
<url>dav:https://snapshots.jboss.org/maven2</url>
</snapshotRepository>
</distributionManagement>
-
+
<profiles>
<!-- =============================== -->
<!-- Database profiles -->
@@ -467,11 +451,11 @@
<!--
###################################################################
Profiles naming db instances in the Red Hat QA/QE lab
-
+
First, those with OSS drivers
###################################################################
-->
-
+
<!-- The MySQL 5 test envionment -->
<profile>
<id>mysql5</id>
@@ -491,7 +475,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The MySQL 5.1 test envionment -->
<profile>
<id>mysql51</id>
@@ -511,7 +495,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The MySQL 5.1 Cluster test envionment -->
<profile>
<id>mysql51-cluster</id>
@@ -532,7 +516,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The PostgreSQL 8.2.4 test envionment -->
<profile>
<id>postgresql824</id>
@@ -553,7 +537,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The PostgreSQL 8.3.7 test environment -->
<profile>
<id>postgresql837</id>
@@ -574,7 +558,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The PostgreSQL 8.4 test environment -->
<profile>
<id>postgresql84</id>
@@ -594,13 +578,13 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!--
###################################################################
Then, those with commercial drivers
###################################################################
-->
-
+
<!-- The DB2 8.x test envionment (using 9x drivers)-->
<profile>
<id>db2v82</id>
@@ -625,7 +609,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The DB2 9.1 test envionment (using 9x drivers)-->
<profile>
<id>db2v91</id>
@@ -650,7 +634,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The DB2 9.7 test envionment (using 9x drivers)-->
<profile>
<id>db2v97</id>
@@ -675,7 +659,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The Oracle9i test envionment -->
<profile>
<id>oracle9i</id>
@@ -696,7 +680,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The Oracle10g test envionment -->
<profile>
<id>oracle10g</id>
@@ -717,7 +701,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The Oracle11g test envionment -->
<profile>
<id>oracle11g</id>
@@ -737,7 +721,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The Oracle11gRAC test envionment -->
<profile>
<id>oracle11gRAC</id>
@@ -759,7 +743,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The Sybase 15 test envionment -->
<profile>
<id>sybase15</id>
@@ -779,7 +763,7 @@
<jdbc.isolation/>
</properties>
</profile>
-
+
<!-- The SQLServer2005 (MS JDBC) test envionment -->
<profile>
<id>mssql2005</id>
@@ -799,7 +783,7 @@
<jdbc.isolation>4096</jdbc.isolation>
</properties>
</profile>
-
+
<!-- The SQLServer2005 (MS JDBC) test envionment -->
<profile>
<id>mssql2008</id>
@@ -819,162 +803,5 @@
<jdbc.isolation>4096</jdbc.isolation>
</properties>
</profile>
-
- <!-- ================================ -->
- <!-- Dependecy profiles to test w and -->
- <!-- w/o optional dependencies -->
- <!-- =============================== -->
- <profile>
- <id>with-optional-jars</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <dependencies>
- <!-- =============================== -->
- <!-- Optional Dependencies -->
- <!-- =============================== -->
- <dependency>
- <groupId>org.hibernate</groupId>
- <artifactId>hibernate-annotations</artifactId>
- <version>${hibernateVersion}</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.hibernate</groupId>
- <artifactId>hibernate-entitymanager</artifactId>
- <version>${hibernateVersion}</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.solr</groupId>
- <artifactId>solr-common</artifactId>
- <version>1.3.0</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.solr</groupId>
- <artifactId>solr-core</artifactId>
- <version>1.3.0</version>
- <optional>true</optional>
- <exclusions>
- <exclusion>
- <groupId>commons-httpclient</groupId>
- <artifactId>commons-httpclient</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.solr</groupId>
- <artifactId>solr-solrj</artifactId>
- </exclusion>
- <exclusion>
- <groupId>woodstox</groupId>
- <artifactId>wstx-asl</artifactId>
- </exclusion>
- <exclusion>
- <groupId>net.java.dev.stax-utils</groupId>
- <artifactId>stax-utils</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.solr</groupId>
- <artifactId>solr-lucene-core</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-snowball</artifactId>
- <version>${luceneVersion}</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-analyzers</artifactId>
- <version>${luceneVersion}</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-codec</artifactId>
- <version>1.3</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-io</artifactId>
- <version>1.3.2</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>javax.jms</groupId>
- <artifactId>jms</artifactId>
- <version>1.1</version>
- <scope>provided</scope>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>jgroups</groupId>
- <artifactId>jgroups</artifactId>
- <version>2.6.7.GA</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>javax.annotation</groupId>
- <artifactId>jsr250-api</artifactId>
- <version>1.0</version>
- <optional>true</optional>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <forkMode>once</forkMode>
- <!--argLine>-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005</argLine-->
- <redirectTestOutputToFile>true</redirectTestOutputToFile>
- <excludes>
- <exclude>**/classloading/*.java</exclude>
- <exclude>**/*PerfTest.java</exclude>
- </excludes>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
- <profile>
- <id>without-optional-jars</id>
- <dependencies>
- <dependency>
- <groupId>javassist</groupId>
- <artifactId>javassist</artifactId>
- <version>3.4.GA</version>
- <optional>true</optional>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <forkMode>once</forkMode>
- <!--argLine>-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005</argLine-->
- <redirectTestOutputToFile>true</redirectTestOutputToFile>
- <excludes>
- <exclude>none</exclude>
- <exclude>**/*PerfTest.java</exclude>
- </excludes>
- <includes>
- <include>**/classloading/*Test.java</include>
- </includes>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
</profiles>
</project>
Hibernate SVN: r19001 - in validator/trunk: hibernate-validator and 1 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2010-03-15 16:36:48 -0400 (Mon, 15 Mar 2010)
New Revision: 19001
Added:
validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/GroupsTest.java
validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/Try.java
Modified:
validator/trunk/hibernate-validator/pom.xml
validator/trunk/pom.xml
Log:
HV-267 added maven-bundle-plugin to generate OSGi manifest entries
Modified: validator/trunk/hibernate-validator/pom.xml
===================================================================
--- validator/trunk/hibernate-validator/pom.xml 2010-03-15 14:16:44 UTC (rev 19000)
+++ validator/trunk/hibernate-validator/pom.xml 2010-03-15 20:36:48 UTC (rev 19001)
@@ -113,6 +113,40 @@
</configuration>
</plugin>
<plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifestFile>${pom.build.outputDirectory}/META-INF/MANIFEST.MF</manifestFile>
+ </archive>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <extensions>true</extensions>
+ <configuration>
+ <instructions>
+ <Import-Package>
+ javax.persistence.*;version="[2.0.0,3.0.0)";resolution:=optional,
+ javax.validation.*;version="[1.0.0,2.0.0)",
+ javax.xml.*;version="0",
+ org.xml.sax.*;version="0",
+ org.slf4j.*;version="[1.5.6,2.0.0)"
+ </Import-Package>
+ <Export-Package>org.hibernate.validator.*;version="${pom.version}"</Export-Package>
+ </instructions>
+ </configuration>
+ <executions>
+ <execution>
+ <id>bundle-manifest</id>
+ <phase>process-classes</phase>
+ <goals>
+ <goal>manifest</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
Copied: validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/GroupsTest.java (from rev 18807, validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/metadata/ElementDescriptorTest.java)
===================================================================
--- validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/GroupsTest.java (rev 0)
+++ validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/GroupsTest.java 2010-03-15 20:36:48 UTC (rev 19001)
@@ -0,0 +1,45 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.validator.engine.groups;
+
+import java.util.Set;
+import javax.validation.ConstraintViolation;
+import javax.validation.Validator;
+
+import org.testng.annotations.Test;
+
+import org.hibernate.validator.util.TestUtil;
+
+import static org.hibernate.validator.util.TestUtil.assertCorrectConstraintViolationMessages;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class GroupsTest {
+
+ @Test
+ public void testGroupInheritance() {
+ Validator validator = TestUtil.getValidator();
+ Try tryMe = new Try();
+ tryMe.field2 = "foo";
+ tryMe.field3 = "bar";
+
+ Set<ConstraintViolation<Try>> violations = validator.validate( tryMe, Try.GlobalCheck.class );
+ //assertCorrectConstraintViolationMessages(violations, "field1");
+ }
+}
\ No newline at end of file
Added: validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/Try.java
===================================================================
--- validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/Try.java (rev 0)
+++ validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/Try.java 2010-03-15 20:36:48 UTC (rev 19001)
@@ -0,0 +1,51 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.validator.engine.groups;
+
+import javax.validation.GroupSequence;
+import javax.validation.constraints.NotNull;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class Try {
+ @NotNull(message = "field1", groups = BaseComponent.class)
+ public String field1;
+
+ @NotNull(message = "field2", groups = Component.class)
+ public String field2;
+
+ @NotNull(message = "field3", groups = OtherComponent.class)
+ public String field3;
+
+
+ public interface BaseComponent {
+ }
+
+ public interface Component extends BaseComponent {
+ }
+
+ public interface OtherComponent {
+ }
+
+ @GroupSequence({ Component.class, OtherComponent.class })
+ public interface GlobalCheck {
+ }
+}
+
+
Property changes on: validator/trunk/hibernate-validator/src/test/java/org/hibernate/validator/engine/groups/Try.java
___________________________________________________________________
Name: svn:keywords
+ Id
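The Try/GroupsTest pair added above exercises sequenced group validation. Assuming standard Bean Validation 1.0 group-sequence semantics, the assertion left commented out in GroupsTest corresponds to the expectation sketched below; the snippet is illustrative only and would live inside GroupsTest, reusing its TestUtil helper.

        // Sketch of the expected behaviour when validating against the GlobalCheck sequence.
        Validator validator = TestUtil.getValidator();

        Try tryMe = new Try();
        tryMe.field2 = "foo";   // satisfies the Component constraint
        tryMe.field3 = "bar";   // satisfies the OtherComponent constraint
                                // field1 stays null and violates the BaseComponent constraint

        // GlobalCheck is @GroupSequence({ Component.class, OtherComponent.class }).
        // Component extends BaseComponent, so the first element of the sequence checks field1 and
        // field2; field1 fails, and the sequence stops before OtherComponent is ever evaluated.
        Set<ConstraintViolation<Try>> violations = validator.validate( tryMe, Try.GlobalCheck.class );

        // Expected outcome: exactly one violation, carrying the message "field1".
        assert violations.size() == 1;
        assert "field1".equals( violations.iterator().next().getMessage() );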
Modified: validator/trunk/pom.xml
===================================================================
--- validator/trunk/pom.xml 2010-03-15 14:16:44 UTC (rev 19000)
+++ validator/trunk/pom.xml 2010-03-15 20:36:48 UTC (rev 19001)
@@ -279,6 +279,11 @@
<artifactId>maven-cli-plugin</artifactId>
<version>0.6.3.CR2</version>
</plugin>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <version>2.0.1</version>
+ </plugin>
</plugins>
</pluginManagement>
</build>
Hibernate SVN: r19000 - core/trunk/testsuite/src/test/java/org/hibernate/test/hql.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-15 10:16:44 -0400 (Mon, 15 Mar 2010)
New Revision: 19000
Modified:
core/trunk/testsuite/src/test/java/org/hibernate/test/hql/CriteriaHQLAlignmentTest.java
Log:
HHH-5010 org.hibernate.test.hql.CriteriaHQLAlignmentTest.testCriteriaAggregationReturnType() needs to call flush before doing the query
Modified: core/trunk/testsuite/src/test/java/org/hibernate/test/hql/CriteriaHQLAlignmentTest.java
===================================================================
--- core/trunk/testsuite/src/test/java/org/hibernate/test/hql/CriteriaHQLAlignmentTest.java 2010-03-15 12:49:58 UTC (rev 18999)
+++ core/trunk/testsuite/src/test/java/org/hibernate/test/hql/CriteriaHQLAlignmentTest.java 2010-03-15 14:16:44 UTC (rev 19000)
@@ -119,7 +119,8 @@
human.setBigIntegerValue( new BigInteger("42") );
human.setBigDecimalValue( new BigDecimal(45) );
s.save(human);
-
+ s.flush();
+ s.clear();
// EJB3: COUNT returns Long
Long longValue = (Long) s.createCriteria( Human.class ).setProjection( Projections.rowCount()).uniqueResult();
assertEquals(longValue, new Long(1));
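The fix follows a common idiom: flush the pending insert and clear the session before running an aggregate projection, so that the SQL-level query sees the row rather than only the in-memory entity. A generic sketch of that idiom follows (the session factory and entity name are placeholders, not taken from the test):

// Assumes org.hibernate.Session, org.hibernate.Transaction, org.hibernate.criterion.Projections
// imports and an already configured SessionFactory named sessionFactory.
Session s = sessionFactory.openSession();
Transaction tx = s.beginTransaction();

SomeEntity entity = new SomeEntity();   // placeholder entity
s.save( entity );                       // queued as a pending insert in the session

s.flush();   // push the pending INSERT to the database
s.clear();   // detach everything so later reads go back to the database

// The aggregate projection now counts rows that actually exist in the database.
Long rowCount = (Long) s.createCriteria( SomeEntity.class )
        .setProjection( Projections.rowCount() )
        .uniqueResult();

tx.commit();
s.close();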
Hibernate SVN: r18999 - core/trunk/core/src/main/java/org/hibernate/dialect.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2010-03-15 08:49:58 -0400 (Mon, 15 Mar 2010)
New Revision: 18999
Modified:
core/trunk/core/src/main/java/org/hibernate/dialect/Ingres9Dialect.java
Log:
HHH-4946 org.hibernate.test.legacy.FooBarTests testLimit failure with Ingres
Modified: core/trunk/core/src/main/java/org/hibernate/dialect/Ingres9Dialect.java
===================================================================
--- core/trunk/core/src/main/java/org/hibernate/dialect/Ingres9Dialect.java 2010-03-15 10:29:22 UTC (rev 18998)
+++ core/trunk/core/src/main/java/org/hibernate/dialect/Ingres9Dialect.java 2010-03-15 12:49:58 UTC (rev 18999)
@@ -3,6 +3,7 @@
import java.sql.Types;
import org.hibernate.Hibernate;
+import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
/**
@@ -41,7 +42,6 @@
*/
protected void registerDateTimeColumnTypes() {
registerColumnType(Types.DATE, "ansidate");
- //registerColumnType(Types.TIME, "time with time zone");
registerColumnType(Types.TIMESTAMP, "timestamp(9) with time zone");
}
@@ -126,7 +126,7 @@
}
/**
- * Retrieve the command used to retrieve the current timestamp from the
+ * Retrieve the command used to retrieve the current timestammp from the
* database.
*
* @return The command.
@@ -189,7 +189,7 @@
}
/**
- * Does this dialect support bind variables (i.e., prepared statement
+ * Does this dialect support bind variables (i.e., prepared statememnt
* parameters) for its limit/offset?
*
* @return false
@@ -199,17 +199,29 @@
}
/**
+ * Does the <tt>LIMIT</tt> clause take a "maximum" row number instead
+ * of a total number of returned rows?
+ */
+ public boolean useMaxForLimit() {
+ return false;
+ }
+
+ /**
* Add a <tt>LIMIT</tt> clause to the given SQL <tt>SELECT</tt>
*
* @return the modified SQL
*/
public String getLimitString(String querySelect, int offset, int limit) {
- StringBuffer sb = new StringBuffer(querySelect.length() + 16);
- sb.append(querySelect.trim()).insert(6, " first " + limit);
+ StringBuffer soff = new StringBuffer(" offset " + offset);
+ StringBuffer slim = new StringBuffer(" fetch first " + limit + " rows only");
+ StringBuffer sb = new StringBuffer(querySelect.length() +
+ soff.length() + slim.length()).append(querySelect);
if (offset > 0) {
- sb.append(" offset " + offset);
+ sb.append(soff);
}
+ if (limit > 0) {
+ sb.append(slim);
+ }
return sb.toString();
}
-
}
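The rewritten getLimitString switches Ingres paging from the "first N" form to the ANSI-style "offset ... fetch first ... rows only" form. A quick worked example of what the implementation above should produce (the expected output in the comments is derived from the code as written, not from running against an Ingres instance):

import org.hibernate.dialect.Ingres9Dialect;

public class IngresLimitStringExample {
    public static void main(String[] args) {
        Ingres9Dialect dialect = new Ingres9Dialect();

        // limit only: no leading "offset" clause is emitted
        System.out.println( dialect.getLimitString( "select * from domain", 0, 10 ) );
        // expected: select * from domain fetch first 10 rows only

        // offset and limit combined
        System.out.println( dialect.getLimitString( "select * from domain", 20, 10 ) );
        // expected: select * from domain offset 20 fetch first 10 rows only
    }
}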