Hibernate SVN: r14508 - core/trunk.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-04-12 00:11:21 -0400 (Sat, 12 Apr 2008)
New Revision: 14508
Modified:
core/trunk/pom.xml
Log:
release driving assembly ???
Modified: core/trunk/pom.xml
===================================================================
--- core/trunk/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
+++ core/trunk/pom.xml 2008-04-12 04:11:21 UTC (rev 14508)
@@ -48,10 +48,17 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>2.0-beta-7</version>
- <configuration>
- <autoVersionSubmodules>true</autoVersionSubmodules>
- <goals>install,site,assembly:single</goals>
- </configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>perform</goal>
+ </goals>
+ <configuration>
+ <autoVersionSubmodules>true</autoVersionSubmodules>
+ <goals>install,site,assembly:single</goals>
+ </configuration>
+ </execution>
+ </executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
16 years, 10 months
Hibernate SVN: r14507 - in core/trunk: cache-ehcache and 15 other directories.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-04-12 00:05:12 -0400 (Sat, 12 Apr 2008)
New Revision: 14507
Modified:
core/trunk/cache-ehcache/pom.xml
core/trunk/cache-jbosscache/pom.xml
core/trunk/cache-jbosscache2/pom.xml
core/trunk/cache-oscache/pom.xml
core/trunk/cache-swarmcache/pom.xml
core/trunk/connection-c3p0/pom.xml
core/trunk/connection-proxool/pom.xml
core/trunk/core/pom.xml
core/trunk/documentation/jbosscache2/pom.xml
core/trunk/documentation/manual/pom.xml
core/trunk/documentation/pom.xml
core/trunk/documentation/tutorial/pom.xml
core/trunk/eg/pom.xml
core/trunk/jmx/pom.xml
core/trunk/pom.xml
core/trunk/testing/pom.xml
core/trunk/testsuite/pom.xml
Log:
update to version 3 of parents
Modified: core/trunk/cache-ehcache/pom.xml
===================================================================
--- core/trunk/cache-ehcache/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/cache-ehcache/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/cache-jbosscache/pom.xml
===================================================================
--- core/trunk/cache-jbosscache/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/cache-jbosscache/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/cache-jbosscache2/pom.xml
===================================================================
--- core/trunk/cache-jbosscache2/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/cache-jbosscache2/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/cache-oscache/pom.xml
===================================================================
--- core/trunk/cache-oscache/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/cache-oscache/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/cache-swarmcache/pom.xml
===================================================================
--- core/trunk/cache-swarmcache/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/cache-swarmcache/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/connection-c3p0/pom.xml
===================================================================
--- core/trunk/connection-c3p0/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/connection-c3p0/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/connection-proxool/pom.xml
===================================================================
--- core/trunk/connection-proxool/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/connection-proxool/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/core/pom.xml
===================================================================
--- core/trunk/core/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/core/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/documentation/jbosscache2/pom.xml
===================================================================
--- core/trunk/documentation/jbosscache2/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/documentation/jbosscache2/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -7,7 +7,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/documentation/manual/pom.xml
===================================================================
--- core/trunk/documentation/manual/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/documentation/manual/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -7,7 +7,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/documentation/pom.xml
===================================================================
--- core/trunk/documentation/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/documentation/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -7,7 +7,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/documentation/tutorial/pom.xml
===================================================================
--- core/trunk/documentation/tutorial/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/documentation/tutorial/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -7,7 +7,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/eg/pom.xml
===================================================================
--- core/trunk/eg/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/eg/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -7,7 +7,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/jmx/pom.xml
===================================================================
--- core/trunk/jmx/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/jmx/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/pom.xml
===================================================================
--- core/trunk/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
@@ -50,8 +50,20 @@
<version>2.0-beta-7</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
+ <goals>install,site,assembly:single</goals>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-2</version>
+ <configuration>
+ <descriptors>
+ <descriptor>src/assembly/hibernate-all.xml</descriptor>
+ <descriptor>src/assembly/dist.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </plugin>
</plugins>
</build>
@@ -61,39 +73,10 @@
A profile used implicitly by the release plugin. Here we use it to enable documentation building
as well as execution of the assembly plugin (to build the SourceForge dist).
-->
- <id>release</id>
-<!--
- temporarily disable documentation module
+ <id>release-profile</id>
<modules>
<module>documentation</module>
</modules>
--->
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.2-beta-2</version>
-<!--
- <executions>
- <execution>
- <id>assemble</id>
- <phase>deploy</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
--->
- <configuration>
- <descriptors>
- <descriptor>src/assembly/dist.xml</descriptor>
- <descriptor>src/assembly/hibernate-all.xml</descriptor>
- </descriptors>
- </configuration>
- </plugin>
- </plugins>
- </build>
</profile>
<!-- seperate profile for documentation activation -->
Modified: core/trunk/testing/pom.xml
===================================================================
--- core/trunk/testing/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/testing/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
Modified: core/trunk/testsuite/pom.xml
===================================================================
--- core/trunk/testsuite/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
+++ core/trunk/testsuite/pom.xml 2008-04-12 04:05:12 UTC (rev 14507)
@@ -5,7 +5,7 @@
<parent>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core-parent</artifactId>
- <version>2</version>
+ <version>3</version>
</parent>
<groupId>org.hibernate</groupId>
16 years, 10 months
Hibernate SVN: r14506 - core/trunk/cache-jbosscache2.
by hibernate-commits@lists.jboss.org
Author: bstansberry(a)jboss.com
Date: 2008-04-11 17:31:27 -0400 (Fri, 11 Apr 2008)
New Revision: 14506
Modified:
core/trunk/cache-jbosscache2/pom.xml
Log:
[HHH-3141] Move to JBC 2.1.1.CR2
Modified: core/trunk/cache-jbosscache2/pom.xml
===================================================================
--- core/trunk/cache-jbosscache2/pom.xml 2008-04-11 16:56:02 UTC (rev 14505)
+++ core/trunk/cache-jbosscache2/pom.xml 2008-04-11 21:31:27 UTC (rev 14506)
@@ -25,7 +25,7 @@
<dependency>
<groupId>org.jboss.cache</groupId>
<artifactId>jbosscache-core</artifactId>
- <version>2.1.1.CR1</version>
+ <version>2.1.1.CR2</version>
</dependency>
<!-- test dependencies -->
@@ -196,4 +196,4 @@
</properties>
</profile>
</profiles>
-</project>
\ No newline at end of file
+</project>
16 years, 10 months
Hibernate SVN: r14505 - in search/trunk: src/test/org/hibernate/search/test/query and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2008-04-11 12:56:02 -0400 (Fri, 11 Apr 2008)
New Revision: 14505
Modified:
search/trunk/doc/reference/en/modules/mapping.xml
search/trunk/src/test/org/hibernate/search/test/query/TermVectorTest.java
Log:
HSEARCH-141 doc (John Griffin)
Modified: search/trunk/doc/reference/en/modules/mapping.xml
===================================================================
--- search/trunk/doc/reference/en/modules/mapping.xml 2008-04-11 16:42:58 UTC (rev 14504)
+++ search/trunk/doc/reference/en/modules/mapping.xml 2008-04-11 16:56:02 UTC (rev 14505)
@@ -77,9 +77,9 @@
<para>termVector: describes collections of term-frequency pairs. To
utilize Term Vectors this attribute enables their being stored
during indexing so they are stored with documents. The default value
- is Field.TermVector.NO. </para>
+ is Field.TermVector.NO.</para>
- <para>The different values of this attribute are </para>
+ <para>The different values of this attribute are</para>
<informaltable align="left" width="">
<tgroup cols="2">
@@ -97,7 +97,9 @@
<row>
<entry align="left">Field.TermVector.YES</entry>
- <entry>Store the term vectors of each document.</entry>
+ <entry>Store the term vectors of each document. This
+ produces two synchronized arrays, one contains document
+ terms and the other contains the term's frequency.</entry>
</row>
<row>
@@ -109,15 +111,19 @@
<row>
<entry align="left">Field.TermVector.WITH_OFFSETS</entry>
- <entry>Store the term vector and token offset
- information</entry>
+ <entry>Store the term vector and token offset information.
+ This is the same as Field.TermVector.YES plus this contains
+ the starting and ending offset position information for the
+ terms</entry>
</row>
<row>
<entry align="left">Field.TermVector.WITH_POSITIONS</entry>
- <entry>Store the term vector and token position
- information</entry>
+ <entry>Store the term vector and token position information.
+ This is the same as Field.TermVector.YES plus this contains
+ the ordinal positions of each occurrence of a term in a
+ document.</entry>
</row>
<row>
@@ -125,7 +131,8 @@
align="left">Field.TermVector.WITH_POSITIONS_OFFSETS</entry>
<entry>Store the term vector, token position and offset
- information</entry>
+ information. This is a combination of the YES, WITH_OFFSETS
+ and WITH_POSITIONS.</entry>
</row>
</tbody>
</tgroup>
@@ -635,19 +642,17 @@
<section>
<title>Custom Bridge</title>
- <para>Sometimes, the built-in bridges of Hibernate Search do not
- cover some of your property types, or the String representation
- used by the bridge does not meet your requirements.
- The following paragraphs describe several solutions
- to this problem.</para>
+ <para>Sometimes, the built-in bridges of Hibernate Search do not cover
+ some of your property types, or the String representation used by the
+ bridge does not meet your requirements. The following paragraphs
+ describe several solutions to this problem.</para>
<section>
<title>StringBridge</title>
- <para>The simpliest custom solution is to give Hibernate
- Search an implementation of your expected
- <emphasis>object to String</emphasis> bridge. To do so you need to
- implements the
+ <para>The simpliest custom solution is to give Hibernate Search an
+ implementation of your expected <emphasis>object to String</emphasis>
+ bridge. To do so you need to implements the
<literal>org.hibernate.search.bridge.StringBridge</literal>
interface</para>
@@ -837,7 +842,8 @@
custom field bridge implementation receives the entity instance as the
value parameter instead of a particular property. Though not shown in
this example, <classname>@ClassBridge</classname> supports the
- <methodname>termVector</methodname> attribute discussed previously.</para>
+ <methodname>termVector</methodname> attribute discussed
+ previously.</para>
<programlisting>@Entity
@Indexed
Modified: search/trunk/src/test/org/hibernate/search/test/query/TermVectorTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/query/TermVectorTest.java 2008-04-11 16:42:58 UTC (rev 14504)
+++ search/trunk/src/test/org/hibernate/search/test/query/TermVectorTest.java 2008-04-11 16:56:02 UTC (rev 14505)
@@ -17,100 +17,92 @@
* @author John Griffin
*/
public class TermVectorTest extends SearchTestCase {
- private static Log log = LogFactory.getLog( TermVectorTest.class );
+ private static Log log = LogFactory.getLog(TermVectorTest.class);
- public void testPositionOffsets() throws Exception {
- FullTextSession s = Search.createFullTextSession( openSession() );
- createIndex( s );
+ public void testPositionOffsets() throws Exception {
+ FullTextSession s = Search.createFullTextSession(openSession());
+ createIndex(s);
- s.clear();
- Transaction tx = s.beginTransaction();
+ s.clear();
+ Transaction tx = s.beginTransaction();
- // Here's how to get a reader from a FullTextSession
- SearchFactory searchFactory = s.getSearchFactory();
- DirectoryProvider provider = searchFactory.getDirectoryProviders( ElectricalProperties.class )[0];
- ReaderProvider readerProvider = searchFactory.getReaderProvider();
- IndexReader reader = readerProvider.openReader( provider );
+ // Here's how to get a reader from a FullTextSession
+ SearchFactory searchFactory = s.getSearchFactory();
+ DirectoryProvider provider = searchFactory.getDirectoryProviders(ElectricalProperties.class)[0];
+ ReaderProvider readerProvider = searchFactory.getReaderProvider();
+ IndexReader reader = readerProvider.openReader(provider);
- /**
- * Since there are so many combinations of results here, rather
- * than try to do assertions, this test prints out all the results
- * found from the three ElectricalProperties entities. This will
- * do a better job of demonstrating exactly what the result are. - J.G.
- */
- ///TODO: try and find some ways to assert it. Nobody reads the results. I've added
- for (int x = 0; x < 3; x++) {
- TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector( x, "content" );
- assertNotNull( vector );
- String[] terms = vector.getTerms();
- int[] freqs = vector.getTermFrequencies();
+ /**
+ * Since there are so many combinations of results here, we are only going
+ * to assert a few. - J.G.
+ */
+ int x = 0;
+ TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector(x, "content");
+ assertNotNull(vector);
+ String[] terms = vector.getTerms();
+ int[] freqs = vector.getTermFrequencies();
- for (int y = 0; y < vector.size(); y++) {
- log.info( "doc# =>" + x );
- log.info( " term => " + terms[y] );
- log.info( " freq => " + freqs[y] );
+ assertEquals("electrical", terms[x]);
+ assertEquals(2, freqs[x]);
- int[] positions = vector.getTermPositions( y );
- TermVectorOffsetInfo[] offsets = vector.getOffsets( y );
- for (int z = 0; z < positions.length; z++) {
- log.info( " position => " + positions[z] );
- log.info( " starting offset => " + offsets[z].getStartOffset() );
- log.info( " ending offset => " + offsets[z].getEndOffset() );
- }
- log.info( "---------------" );
- }
- }
+ TermVectorOffsetInfo[] offsets = vector.getOffsets(x);
+ assertEquals(0, offsets[x].getStartOffset());
+ assertEquals(10, offsets[x].getEndOffset());
- //cleanup
- for (Object element : s.createQuery( "from " + ElectricalProperties.class.getName() ).list())
- s.delete( element );
- tx.commit();
- s.close();
- }
+ int[] termPositions = vector.getTermPositions(0);
+ assertEquals(0, termPositions[0]);
+ assertEquals(3, termPositions[1]);
- public void testNoTermVector() throws Exception {
- FullTextSession s = Search.createFullTextSession( openSession() );
- Transaction tx = s.beginTransaction();
+ //cleanup
+ for (Object element : s.createQuery("from " + Employee.class.getName()).list()) s.delete(element);
+ tx.commit();
+ s.close();
+ }
- Employee e1 = new Employee( 1000, "Griffin", "ITech" );
- s.save( e1 );
- tx.commit();
- s.clear();
- tx = s.beginTransaction();
+ public void testNoTermVector() throws Exception {
+ FullTextSession s = Search.createFullTextSession(openSession());
+ Transaction tx = s.beginTransaction();
- // Here's how to get a reader from a FullTextSession
- SearchFactory searchFactory = s.getSearchFactory();
- DirectoryProvider provider = searchFactory.getDirectoryProviders( Employee.class )[0];
- ReaderProvider readerProvider = searchFactory.getReaderProvider();
- IndexReader reader = readerProvider.openReader( provider );
+ Employee e1 = new Employee(1000, "Griffin", "ITech");
+ s.save(e1);
+ tx.commit();
+ s.clear();
- TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector( 0, "dept" );
- assertNull( "should not find a term position vector", vector );
+ tx = s.beginTransaction();
- //cleanup
- for (Object element : s.createQuery( "from " + ElectricalProperties.class.getName() ).list())
- s.delete( element );
- tx.commit();
- s.close();
- }
+ // Here's how to get a reader from a FullTextSession
+ SearchFactory searchFactory = s.getSearchFactory();
+ DirectoryProvider provider = searchFactory.getDirectoryProviders(Employee.class)[0];
+ ReaderProvider readerProvider = searchFactory.getReaderProvider();
+ IndexReader reader = readerProvider.openReader(provider);
- private void createIndex(FullTextSession s) {
- Transaction tx = s.beginTransaction();
- ElectricalProperties e1 = new ElectricalProperties( 1000, "Electrical Engineers measure Electrical Properties" );
- s.save( e1 );
- ElectricalProperties e2 = new ElectricalProperties( 1001, "Electrical Properties are interesting" );
- s.save( e2 );
- ElectricalProperties e3 = new ElectricalProperties( 1002, "Electrical Properties are measurable properties" );
- s.save( e3 );
+ TermPositionVector vector = (TermPositionVector) reader.getTermFreqVector(0, "dept");
+ assertNull("should not find a term position vector", vector);
- tx.commit();
- }
+ //cleanup
+ for (Object element : s.createQuery("from " + ElectricalProperties.class.getName()).list())
+ s.delete(element);
+ tx.commit();
+ s.close();
+ }
- protected Class[] getMappings() {
- return new Class[] {
- ElectricalProperties.class,
- Employee.class
- };
- }
+ private void createIndex(FullTextSession s) {
+ Transaction tx = s.beginTransaction();
+ ElectricalProperties e1 = new ElectricalProperties(1000, "Electrical Engineers measure Electrical Properties");
+ s.save(e1);
+ ElectricalProperties e2 = new ElectricalProperties(1001, "Electrical Properties are interesting");
+ s.save(e2);
+ ElectricalProperties e3 = new ElectricalProperties(1002, "Electrical Properties are measurable properties");
+ s.save(e3);
+
+ tx.commit();
+ }
+
+ protected Class[] getMappings() {
+ return new Class[]{
+ ElectricalProperties.class,
+ Employee.class
+ };
+ }
}
16 years, 10 months
Hibernate SVN: r14504 - in search/trunk/src/java/org/hibernate/search: store and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2008-04-11 12:42:58 -0400 (Fri, 11 Apr 2008)
New Revision: 14504
Modified:
search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java
search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
Log:
HSEARCH-176
Modified: search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java 2008-04-11 11:07:14 UTC (rev 14503)
+++ search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java 2008-04-11 16:42:58 UTC (rev 14504)
@@ -9,6 +9,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.index.IndexWriter;
+import org.hibernate.search.SearchException;
import org.hibernate.search.backend.configuration.IndexWriterSetting;
/**
@@ -28,8 +29,8 @@
// value keyword
public static final String EXPLICIT_DEFAULT_VALUE = "default";
// property path keywords
- public static final String BATCH = "batch.";
- public static final String TRANSACTION = "transaction.";
+ public static final String BATCH = "batch";
+ public static final String TRANSACTION = "transaction";
private final ParameterSet transactionIndexParameters;
private final ParameterSet batchIndexParameters;
@@ -40,11 +41,11 @@
//don't iterate on property entries we know all the keys:
for ( IndexWriterSetting t : IndexWriterSetting.values() ) {
String key = t.getKey();
- String trxValue = sourceProps.getProperty( TRANSACTION + key );
+ String trxValue = sourceProps.getProperty( TRANSACTION + "." + key );
if (trxValue != null) {
transactionProps.setProperty( key, trxValue );
}
- String batchValue = sourceProps.getProperty( BATCH + key );
+ String batchValue = sourceProps.getProperty( BATCH + "." + key );
if (batchValue != null) {
batchProps.setProperty( key, batchValue );
}
@@ -80,14 +81,14 @@
* @param writer the IndexWriter whereto the parameters will be applied.
*/
public void applyToWriter(IndexWriter writer) {
- try {
- for ( Map.Entry<IndexWriterSetting,Integer> entry : parameters.entrySet() ) {
+ for ( Map.Entry<IndexWriterSetting,Integer> entry : parameters.entrySet() ) {
+ try {
entry.getKey().applySetting( writer, entry.getValue() );
+ } catch ( IllegalArgumentException e ) {
+ //TODO if DirectoryProvider had getDirectoryName() exceptions could tell better
+ throw new SearchException( "Illegal IndexWriter setting "
+ + entry.getKey().getKey() + " "+ e.getMessage(), e );
}
- } catch (IllegalArgumentException e) {
- //FIXME shouldn't we raise an exception instead
- log.error( "Illegal IndexWriter setting" + e.getMessage()
- + ". Will use default settings." );
}
}
Modified: search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-04-11 11:07:14 UTC (rev 14503)
+++ search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-04-11 16:42:58 UTC (rev 14504)
@@ -7,6 +7,7 @@
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.regex.Pattern;
import org.hibernate.HibernateException;
import org.hibernate.annotations.common.reflection.ReflectionManager;
@@ -52,6 +53,7 @@
private static final String SHARDING_STRATEGY = "sharding_strategy";
private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
+ private static Pattern dotPattern = Pattern.compile( "\\." );
public DirectoryProviders createDirectoryProviders(XClass entity, Configuration cfg, SearchFactoryImplementor searchFactoryImplementor) {
@@ -202,18 +204,27 @@
* If the index is sharded, the Properties index matches the shard index
*/
private static Properties[] getDirectoryProperties(Configuration cfg, String directoryProviderName) {
+
Properties cfgAndImplicitProperties = new Properties();
- // fcg has no defaults, so we may use keySet iteration
+ // cfg has no defaults, so we may use keySet iteration
//FIXME not so sure about that cfg.setProperties()?
for ( Map.Entry entry : cfg.getProperties().entrySet() ) {
String key = entry.getKey().toString();// casting to String
if ( key.startsWith( LUCENE_PREFIX ) ) {
//put regular properties and add an explicit batch property when a transaction property is set
cfgAndImplicitProperties.put( key, entry.getValue() );
- if ( key.contains( LuceneIndexingParameters.TRANSACTION ) ) {
- //FIXME fix that transaction can appear in the index name
- //I imagine checking the last '.transaction.' is safe.
- String additionalKey = key.replaceFirst(LuceneIndexingParameters.TRANSACTION, LuceneIndexingParameters.BATCH);
+ //be careful to replace only the intended ".transaction." with ".batch.":
+ String[] splitKey = dotPattern.split( key );
+ //TODO this code is vulnerable to properties with dot in the name. This is not a problem today though
+ if ( splitKey.length > 2 && splitKey[ splitKey.length - 2 ]
+ .equals( LuceneIndexingParameters.TRANSACTION ) ) {
+ splitKey[ splitKey.length - 2 ] = LuceneIndexingParameters.BATCH;
+ StringBuilder missingKeyBuilder = new StringBuilder( splitKey[0] );
+ for (int i = 1; i < splitKey.length; i++) {
+ missingKeyBuilder.append( "." );
+ missingKeyBuilder.append( splitKey[i] );
+ }
+ String additionalKey = missingKeyBuilder.toString();
if ( cfg.getProperty(additionalKey) == null ){
cfgAndImplicitProperties.put(additionalKey, cfg.getProperty(key) );
}
16 years, 10 months
Hibernate SVN: r14503 - in branches/Branch_3_2/HibernateExt/tools/src: java/org/hibernate/tool/hbm2x and 2 other directories.
by hibernate-commits@lists.jboss.org
Author: max.andersen(a)jboss.com
Date: 2008-04-11 07:07:14 -0400 (Fri, 11 Apr 2008)
New Revision: 14503
Added:
branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/hbm2x/Hbm2DDLExporter.java
Modified:
branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/ant/Hbm2DDLExporterTask.java
branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/ant/AntHibernateToolTest.java
branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/CachedMetaDataTest.java
Log:
The core tools part of JBIDE-1617
Modified: branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/ant/Hbm2DDLExporterTask.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/ant/Hbm2DDLExporterTask.java 2008-04-11 04:47:54 UTC (rev 14502)
+++ branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/ant/Hbm2DDLExporterTask.java 2008-04-11 11:07:14 UTC (rev 14503)
@@ -4,16 +4,8 @@
*/
package org.hibernate.tool.ant;
-import java.io.File;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Iterator;
-
-import org.apache.tools.ant.BuildException;
-import org.apache.tools.ant.Project;
-import org.hibernate.tool.hbm2ddl.SchemaExport;
-import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2x.Exporter;
+import org.hibernate.tool.hbm2x.Hbm2DDLExporter;
/**
* @author max
@@ -29,52 +21,37 @@
boolean create = true;
boolean format = false;
- String outputfileName = null;
+ String outputFileName = null;
private boolean haltOnError = false;
public Hbm2DDLExporterTask(HibernateToolTask parent) {
super(parent);
}
- public void execute() {
- if(schemaUpdate) {
- SchemaUpdate update = new SchemaUpdate(parent.getConfiguration() );
- update.execute(scriptToConsole, exportToDatabase);
- }
- else {
- SchemaExport export = new SchemaExport(parent.getConfiguration() );
- if(outputfileName!=null) {
- export.setOutputFile(new File(getDestdir(),outputfileName).toString() );
- }
- if(delimiter!=null) {
- export.setDelimiter(delimiter);
- }
- export.setHaltOnError(haltOnError);
- export.setFormat(format);
- if(drop && create) {
- export.create(scriptToConsole, exportToDatabase);
- } else {
- export.execute(scriptToConsole, exportToDatabase, drop, create);
- }
-
- if(export.getExceptions().size()>0) {
- Iterator iterator = export.getExceptions().iterator();
- int cnt=1;
- parent.log(export.getExceptions().size() + " errors occurred while performing <hbm2ddl>.", Project.MSG_WARN);
- while ( iterator.hasNext() ) {
- Throwable throwable = (Throwable) iterator.next();
- parent.log("Error #" + cnt + ": " + throwable.toString(), Project.MSG_WARN);
- StringWriter sw = new StringWriter();
- throwable.printStackTrace(new PrintWriter(sw));
- parent.log(sw.getBuffer().toString(), Project.MSG_VERBOSE);
-
- }
- if(haltOnError) {
- throw new BuildException("Errors while performing <hbm2ddl>");
- }
- }
- }
+ public String getName() {
+ return "hbm2ddl (Generates database schema)";
}
+
+ protected Exporter configureExporter(Exporter exp) {
+ Hbm2DDLExporter exporter = (Hbm2DDLExporter) exp;
+ super.configureExporter( exp );
+ exporter.setExport(exportToDatabase);
+ exporter.setConsole(scriptToConsole);
+ exporter.setUpdate(schemaUpdate);
+ exporter.setDelimiter(delimiter);
+ exporter.setDrop(drop);
+ exporter.setCreate(create);
+ exporter.setFormat(format);
+ exporter.setOutputFileName(outputFileName);
+ exporter.setHaltonerror(haltOnError);
+ return exporter;
+ }
+
+ protected Exporter createExporter() {
+ Hbm2DDLExporter exporter = new Hbm2DDLExporter(parent.getConfiguration(), parent.getDestDir());
+ return exporter;
+ }
+
public void setExport(boolean export) {
exportToDatabase = export;
@@ -105,7 +82,7 @@
* File out put name (default: empty)
*/
public void setOutputFileName(String fileName) {
- outputfileName = fileName;
+ outputFileName = fileName;
}
public void setDrop(boolean drop) {
@@ -116,10 +93,6 @@
this.create = create;
}
- public String getName() {
- return "hbm2ddl (Generates database schema)";
- }
-
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
@@ -131,8 +104,4 @@
public void setHaltonerror(boolean haltOnError) {
this.haltOnError = haltOnError;
}
-
- protected Exporter createExporter() {
- throw new IllegalStateException("Should not call create exporter on hbm2ddl");
- }
}
Added: branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/hbm2x/Hbm2DDLExporter.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/hbm2x/Hbm2DDLExporter.java (rev 0)
+++ branches/Branch_3_2/HibernateExt/tools/src/java/org/hibernate/tool/hbm2x/Hbm2DDLExporter.java 2008-04-11 11:07:14 UTC (rev 14503)
@@ -0,0 +1,163 @@
+/*******************************************************************************
+ * Copyright (c) 2007 Exadel, Inc. and Red Hat, Inc.
+ * Distributed under license by Red Hat, Inc. All rights reserved.
+ * This program is made available under the terms of the
+ * Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Exadel, Inc. and Red Hat, Inc. - initial API and implementation
+ ******************************************************************************/
+package org.hibernate.tool.hbm2x;
+
+import java.io.File;
+import java.util.Iterator;
+
+import org.apache.tools.ant.BuildException;
+import org.hibernate.cfg.Configuration;
+import org.hibernate.tool.hbm2ddl.SchemaExport;
+import org.hibernate.tool.hbm2ddl.SchemaUpdate;
+
+/**
+ * Schema Export (.ddl) code generation.
+ *
+ * @author Vitali
+ *
+ */
+public class Hbm2DDLExporter extends AbstractExporter {
+
+ protected boolean exportToDatabase = true;
+ protected boolean scriptToConsole = true;
+ protected boolean schemaUpdate = false;
+ protected String delimiter = ";";
+ protected boolean drop = false;
+ protected boolean create = true;
+ protected boolean format = false;
+
+ protected String outputFileName = null;
+ protected boolean haltOnError = false;
+
+ public Hbm2DDLExporter() {
+ }
+
+ public Hbm2DDLExporter(Configuration cfg, File outputdir) {
+ super(cfg, outputdir);
+ }
+
+ protected boolean setupBoolProperty(String property, boolean defaultVal) {
+ if (!getProperties().containsKey(property)) {
+ return defaultVal;
+ }
+ return Boolean.parseBoolean(getProperties().getProperty(property));
+ }
+
+ protected void setupContext() {
+
+ exportToDatabase = setupBoolProperty("exportToDatabase", exportToDatabase);
+ scriptToConsole = setupBoolProperty("scriptToConsole", scriptToConsole);
+ schemaUpdate = setupBoolProperty("schemaUpdate", schemaUpdate);
+ delimiter = getProperties().getProperty("delimiter", delimiter);
+ drop = setupBoolProperty("drop", drop);
+ create = setupBoolProperty("create", create);
+ format = setupBoolProperty("format", format);
+ outputFileName = getProperties().getProperty("outputFileName", outputFileName);
+ haltOnError = setupBoolProperty("haltOnError", haltOnError);
+ super.setupContext();
+ }
+
+ protected void cleanUpContext() {
+ super.cleanUpContext();
+ }
+
+ protected void doStart() {
+
+ final Configuration configuration = getConfiguration();
+ if (schemaUpdate) {
+ SchemaUpdate update = new SchemaUpdate(configuration);
+ update.execute(scriptToConsole, exportToDatabase);
+ } else {
+ SchemaExport export = new SchemaExport(configuration);
+ if (null != outputFileName) {
+ export.setOutputFile(new File(getOutputDirectory(),
+ outputFileName).toString());
+ }
+ if (null != delimiter) {
+ export.setDelimiter(delimiter);
+ }
+ export.setHaltOnError(haltOnError);
+ export.setFormat(format);
+ if (drop && create) {
+ export.create(scriptToConsole, exportToDatabase);
+ } else {
+ export.execute(scriptToConsole, exportToDatabase, drop, create);
+ }
+
+ if (!export.getExceptions().isEmpty()) {
+ int i = 1;
+ for (Iterator iterator = export.getExceptions().iterator(); iterator
+ .hasNext(); i++) {
+ Throwable element = (Throwable) iterator.next();
+ log.warn("Error #" + i + ": ", element);
+
+ }
+ log.error(i - 1 + " occurred while performing Hbm2DDLExporter.");
+ if (haltOnError) {
+ throw new BuildException(
+ "Errors while performing Hbm2DDLExporter");
+ }
+ }
+ }
+ }
+
+ public void setExport(boolean export) {
+ exportToDatabase = export;
+ }
+
+ /**
+ * Run SchemaUpdate instead of SchemaExport
+ */
+ public void setUpdate(boolean update) {
+ this.schemaUpdate = update;
+ }
+
+ /**
+ * Output sql to console ? (default true)
+ */
+ public void setConsole(boolean console) {
+ this.scriptToConsole = console;
+ }
+
+ /**
+ * Format the generated sql
+ */
+ public void setFormat(boolean format) {
+ this.format = format;
+ }
+
+ /**
+ * File out put name (default: empty)
+ */
+ public void setOutputFileName(String fileName) {
+ outputFileName = fileName;
+ }
+
+ public void setDrop(boolean drop) {
+ this.drop = drop;
+ }
+
+ public void setCreate(boolean create) {
+ this.create = create;
+ }
+
+ public void setDelimiter(String delimiter) {
+ this.delimiter = delimiter;
+ }
+
+ public String getDelimiter() {
+ return delimiter;
+ }
+
+ public void setHaltonerror(boolean haltOnError) {
+ this.haltOnError = haltOnError;
+ }
+}
Modified: branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/ant/AntHibernateToolTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/ant/AntHibernateToolTest.java 2008-04-11 04:47:54 UTC (rev 14502)
+++ branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/ant/AntHibernateToolTest.java 2008-04-11 11:07:14 UTC (rev 14503)
@@ -34,12 +34,17 @@
}
public void testHbm2DDLLogic() {
- executeTarget("testantcfg");
File baseDir = new File(project.getProperty("build.dir"), "topdown");
File onlyCreate = new File(baseDir, "onlycreate.sql");
File onlyDrop = new File(baseDir, "onlydrop.sql");
File dropAndCreate = new File(baseDir, "dropandcreate.sql");
+ assertFalse(onlyCreate.exists());
+ assertFalse(onlyDrop.exists());
+ assertFalse(dropAndCreate.exists());
+
+ executeTarget("testantcfg");
+
assertTrue(onlyCreate.exists());
assertTrue(onlyDrop.exists());
assertTrue(dropAndCreate.exists());
@@ -51,8 +56,12 @@
assertNotNull(TestHelper.findFirstString("drop", onlyDrop));
assertEquals(null, TestHelper.findFirstString("drop", onlyCreate));
- assertNotNull(TestHelper.findFirstString("create", onlyCreate));
+ assertNotNull(TestHelper.findFirstString("create", onlyCreate));
+ onlyCreate.delete();
+ onlyDrop.delete();
+ dropAndCreate.delete();
+
}
public void testJDBCConfiguration() {
Modified: branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/CachedMetaDataTest.java
===================================================================
--- branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/CachedMetaDataTest.java 2008-04-11 04:47:54 UTC (rev 14502)
+++ branches/Branch_3_2/HibernateExt/tools/src/test/org/hibernate/tool/hbm2x/CachedMetaDataTest.java 2008-04-11 11:07:14 UTC (rev 14503)
@@ -1,179 +1,178 @@
-/*
- * Created on 2004-12-01
- *
- */
-package org.hibernate.tool.hbm2x;
-
-import java.util.Iterator;
-
-import org.hibernate.cfg.JDBCMetaDataConfiguration;
-import org.hibernate.cfg.JDBCReaderFactory;
-import org.hibernate.cfg.Settings;
-import org.hibernate.cfg.reveng.DatabaseCollector;
-import org.hibernate.cfg.reveng.DefaultDatabaseCollector;
-import org.hibernate.cfg.reveng.DefaultReverseEngineeringStrategy;
-import org.hibernate.cfg.reveng.JDBCReader;
-import org.hibernate.cfg.reveng.ReverseEngineeringRuntimeInfo;
-import org.hibernate.cfg.reveng.dialect.CachedMetaDataDialect;
-import org.hibernate.cfg.reveng.dialect.MetaDataDialect;
-import org.hibernate.connection.ConnectionProvider;
-import org.hibernate.exception.SQLExceptionConverter;
-import org.hibernate.mapping.Table;
-import org.hibernate.tool.JDBCMetaDataBinderTestCase;
-
-
-
-/**
- * @author max
- *
- */
-public class CachedMetaDataTest extends JDBCMetaDataBinderTestCase {
-
- protected void configure(JDBCMetaDataConfiguration configuration) {
- super.configure( configuration );
- }
-
- public class MockedMetaDataDialect implements MetaDataDialect {
-
- MetaDataDialect delegate;
- private boolean failOnDelegateAccess;
-
- public MockedMetaDataDialect(MetaDataDialect realMetaData) {
- delegate = realMetaData;
- }
-
- public void close() {
- delegate.close();
- }
-
- public void close(Iterator iterator) {
- delegate.close( iterator );
- }
-
- public void configure(ReverseEngineeringRuntimeInfo info) {
- delegate.configure(info);
- }
-
- public Iterator getColumns(String catalog, String schema, String table, String column) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getColumns( catalog, schema, table, column );
- }
- }
-
- public Iterator getExportedKeys(String catalog, String schema, String table) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getExportedKeys( catalog, schema, table );
- }
- }
-
- public Iterator getIndexInfo(String catalog, String schema, String table) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getIndexInfo( catalog, schema, table );
- }
- }
-
- public Iterator getPrimaryKeys(String catalog, String schema, String name) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getPrimaryKeys( catalog, schema, name );
- }
- }
-
- public Iterator getTables(String catalog, String schema, String table) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getTables( catalog, schema, table );
- }
- }
-
- public boolean needQuote(String name) {
- return delegate.needQuote( name );
- }
-
- public void setDelegate(Object object) {
- this.delegate = null;
- }
-
- public void setFailOnDelegateAccess(boolean b) {
- failOnDelegateAccess = b;
- }
-
- public Iterator getSuggestedPrimaryKeyStrategyName(String catalog, String schema, String name) {
- if(failOnDelegateAccess) {
- throw new IllegalStateException("delegate not accessible");
- } else {
- return delegate.getSuggestedPrimaryKeyStrategyName(catalog, schema, name);
- }
- }
-
- }
-
- public void testCachedDialect() {
- Settings buildSettings = cfg.buildSettings();
-
- MetaDataDialect realMetaData = JDBCReaderFactory.newMetaDataDialect( buildSettings.getDialect(), cfg.getProperties() );
-
- MockedMetaDataDialect mock = new MockedMetaDataDialect(realMetaData);
- CachedMetaDataDialect dialect = new CachedMetaDataDialect(mock);
-
- JDBCReader reader = JDBCReaderFactory.newJDBCReader( buildSettings, new DefaultReverseEngineeringStrategy(), dialect );
-
- DatabaseCollector dc = new DefaultDatabaseCollector();
- reader.readDatabaseSchema( dc, null, null );
-
- validate( dc );
-
- mock.setFailOnDelegateAccess(true);
-
- reader = JDBCReaderFactory.newJDBCReader( buildSettings, new DefaultReverseEngineeringStrategy(), dialect );
-
- dc = new DefaultDatabaseCollector();
- reader.readDatabaseSchema( dc, null, null );
-
- validate(dc);
-
-
-
-
- }
-
- private void validate(DatabaseCollector dc) {
- Iterator iterator = dc.iterateTables();
- Table firstChild = (Table) iterator.next();
- assertEquals(firstChild.getName(), "CHILD");
- assertTrue(iterator.hasNext());
-
- iterator = dc.iterateTables();
- assertNotNull(iterator.next());
- assertNotNull(iterator.next());
- assertFalse(iterator.hasNext());
-
-
- assertHasNext("should have recorded one foreignkey to child table", 1, firstChild.getForeignKeyIterator() );
- }
-
- protected String[] getCreateSQL() {
-
- return new String[] {
- "create table master ( id char not null, name varchar(20), primary key (id) )",
- "create table child ( childid char not null, masterref char, primary key (childid), foreign key (masterref) references master(id) )",
- };
- }
-
- protected String[] getDropSQL() {
-
- return new String[] {
- "drop table child",
- "drop table master",
- };
- }
-
-}
+/*
+ * Created on 2004-12-01
+ *
+ */
+package org.hibernate.tool.hbm2x;
+
+import java.util.Iterator;
+
+import org.hibernate.cfg.JDBCMetaDataConfiguration;
+import org.hibernate.cfg.JDBCReaderFactory;
+import org.hibernate.cfg.Settings;
+import org.hibernate.cfg.reveng.DatabaseCollector;
+import org.hibernate.cfg.reveng.DefaultDatabaseCollector;
+import org.hibernate.cfg.reveng.DefaultReverseEngineeringStrategy;
+import org.hibernate.cfg.reveng.JDBCReader;
+import org.hibernate.cfg.reveng.ReverseEngineeringRuntimeInfo;
+import org.hibernate.cfg.reveng.dialect.CachedMetaDataDialect;
+import org.hibernate.cfg.reveng.dialect.MetaDataDialect;
+import org.hibernate.mapping.Table;
+import org.hibernate.tool.JDBCMetaDataBinderTestCase;
+
+
+
+/**
+ * @author max
+ *
+ */
+public class CachedMetaDataTest extends JDBCMetaDataBinderTestCase {
+
+ protected void configure(JDBCMetaDataConfiguration configuration) {
+ super.configure( configuration );
+ }
+
+ public class MockedMetaDataDialect implements MetaDataDialect {
+
+ MetaDataDialect delegate;
+ private boolean failOnDelegateAccess;
+
+ public MockedMetaDataDialect(MetaDataDialect realMetaData) {
+ delegate = realMetaData;
+ }
+
+ public void close() {
+ delegate.close();
+ }
+
+ public void close(Iterator iterator) {
+ delegate.close( iterator );
+ }
+
+ public void configure(ReverseEngineeringRuntimeInfo info) {
+ delegate.configure(info);
+ }
+
+ public Iterator getColumns(String catalog, String schema, String table, String column) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getColumns( catalog, schema, table, column );
+ }
+ }
+
+ public Iterator getExportedKeys(String catalog, String schema, String table) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getExportedKeys( catalog, schema, table );
+ }
+ }
+
+ public Iterator getIndexInfo(String catalog, String schema, String table) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getIndexInfo( catalog, schema, table );
+ }
+ }
+
+ public Iterator getPrimaryKeys(String catalog, String schema, String name) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getPrimaryKeys( catalog, schema, name );
+ }
+ }
+
+ public Iterator getTables(String catalog, String schema, String table) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getTables( catalog, schema, table );
+ }
+ }
+
+ public boolean needQuote(String name) {
+ return delegate.needQuote( name );
+ }
+
+ public void setDelegate(Object object) {
+ this.delegate = null;
+ }
+
+ public void setFailOnDelegateAccess(boolean b) {
+ failOnDelegateAccess = b;
+ }
+
+ public Iterator getSuggestedPrimaryKeyStrategyName(String catalog, String schema, String name) {
+ if(failOnDelegateAccess) {
+ throw new IllegalStateException("delegate not accessible");
+ } else {
+ return delegate.getSuggestedPrimaryKeyStrategyName(catalog, schema, name);
+ }
+ }
+
+ }
+
+ public void testCachedDialect() {
+ Settings buildSettings = cfg.buildSettings();
+
+ MetaDataDialect realMetaData = JDBCReaderFactory.newMetaDataDialect( buildSettings.getDialect(), cfg.getProperties() );
+
+ MockedMetaDataDialect mock = new MockedMetaDataDialect(realMetaData);
+ CachedMetaDataDialect dialect = new CachedMetaDataDialect(mock);
+
+ JDBCReader reader = JDBCReaderFactory.newJDBCReader( buildSettings, new DefaultReverseEngineeringStrategy(), dialect );
+
+ DatabaseCollector dc = new DefaultDatabaseCollector();
+ reader.readDatabaseSchema( dc, null, null );
+
+ validate( dc );
+
+ mock.setFailOnDelegateAccess(true);
+
+ reader = JDBCReaderFactory.newJDBCReader( buildSettings, new DefaultReverseEngineeringStrategy(), dialect );
+
+ dc = new DefaultDatabaseCollector();
+ reader.readDatabaseSchema( dc, null, null );
+
+ validate(dc);
+
+
+
+
+ }
+
+ private void validate(DatabaseCollector dc) {
+ Iterator iterator = dc.iterateTables();
+ Table firstChild = (Table) iterator.next();
+ assertTrue("CHILD".equals(firstChild.getName()) ||
+ "MASTER".equals(firstChild.getName()));
+ assertTrue(iterator.hasNext());
+
+ iterator = dc.iterateTables();
+ assertNotNull(iterator.next());
+ assertNotNull(iterator.next());
+ assertFalse(iterator.hasNext());
+
+
+ assertHasNext("should have recorded one foreignkey to child table", 1, firstChild.getForeignKeyIterator() );
+ }
+
+ protected String[] getCreateSQL() {
+
+ return new String[] {
+ "create table master ( id char not null, name varchar(20), primary key (id) )",
+ "create table child ( childid char not null, masterref char, primary key (childid), foreign key (masterref) references master(id) )",
+ };
+ }
+
+ protected String[] getDropSQL() {
+
+ return new String[] {
+ "drop table child",
+ "drop table master",
+ };
+ }
+
+}
16 years, 10 months
Hibernate SVN: r14502 - core/branches/Branch_3_2/test/org/hibernate/test/cascade.
by hibernate-commits@lists.jboss.org
Author: gbadner
Date: 2008-04-11 00:47:54 -0400 (Fri, 11 Apr 2008)
New Revision: 14502
Added:
core/branches/Branch_3_2/test/org/hibernate/test/cascade/A.java
core/branches/Branch_3_2/test/org/hibernate/test/cascade/G.java
core/branches/Branch_3_2/test/org/hibernate/test/cascade/H.java
core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascade.hbm.xml
core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascadeTest.java
Modified:
core/branches/Branch_3_2/test/org/hibernate/test/cascade/CascadeSuite.java
Log:
HHH-3229 : Added test cases for modifying an entity with multiple paths to associated entities
Added: core/branches/Branch_3_2/test/org/hibernate/test/cascade/A.java
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/A.java (rev 0)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/A.java 2008-04-11 04:47:54 UTC (rev 14502)
@@ -0,0 +1,106 @@
+// $Id$
+
+package org.hibernate.test.cascade;
+
+import java.util.Set;
+import java.util.HashSet;
+
+/**
+ * @author <a href="mailto:ovidiu@feodorov.com">Ovidiu Feodorov</a>
+ *
+ * Copyright 2008 Ovidiu Feodorov
+ *
+ */
+public class A
+{
+ // Constants -----------------------------------------------------------------------------------
+
+ // Static --------------------------------------------------------------------------------------
+
+ // Attributes ----------------------------------------------------------------------------------
+
+ private long id;
+
+ private String data;
+
+ // A 1 - * H
+ private Set hs;
+
+ // A 1 - 1 G
+ private G g;
+
+
+ // Constructors --------------------------------------------------------------------------------
+
+ public A()
+ {
+ hs = new HashSet();
+ }
+
+ public A(String data)
+ {
+ this();
+ this.data = data;
+ }
+
+ // Public --------------------------------------------------------------------------------------
+
+ public long getId()
+ {
+ return id;
+ }
+
+ public void setId(long id)
+ {
+ this.id = id;
+ }
+
+ public void setData(String data)
+ {
+ this.data = data;
+ }
+
+ public String getData()
+ {
+ return data;
+ }
+
+ public void setHs(Set hs)
+ {
+ this.hs = hs;
+ }
+
+ public Set getHs()
+ {
+ return hs;
+ }
+
+ public void setG(G g)
+ {
+ this.g = g;
+ }
+
+ public G getG()
+ {
+ return g;
+ }
+
+ public void addH(H h)
+ {
+ hs.add(h);
+ h.setA(this);
+ }
+
+ public String toString()
+ {
+ return "A[" + id + ", " + data + "]";
+ }
+
+ // Package protected ---------------------------------------------------------------------------
+
+ // Protected -----------------------------------------------------------------------------------
+
+ // Private -------------------------------------------------------------------------------------
+
+ // Inner classes -------------------------------------------------------------------------------
+}
Property changes on: core/branches/Branch_3_2/test/org/hibernate/test/cascade/A.java
___________________________________________________________________
Name: svn:executable
+ *
Modified: core/branches/Branch_3_2/test/org/hibernate/test/cascade/CascadeSuite.java
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/CascadeSuite.java 2008-04-10 19:32:31 UTC (rev 14501)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/CascadeSuite.java 2008-04-11 04:47:54 UTC (rev 14502)
@@ -12,6 +12,7 @@
TestSuite suite = new TestSuite( "Cascade tests" );
suite.addTest( BidirectionalOneToManyCascadeTest.suite() );
suite.addTest( RefreshTest.suite() );
+ suite.addTest( MultiPathCascadeTest.suite() );
return suite;
}
}
Added: core/branches/Branch_3_2/test/org/hibernate/test/cascade/G.java
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/G.java (rev 0)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/G.java 2008-04-11 04:47:54 UTC (rev 14502)
@@ -0,0 +1,95 @@
+package org.hibernate.test.cascade;
+
+import java.util.Set;
+import java.util.HashSet;
+
+/**
+ * @author <a href="mailto:ovidiu@feodorov.com">Ovidiu Feodorov</a>
+ *
+ * Copyright 2008 Ovidiu Feodorov
+ *
+ * @version <tt>$Revision$</tt>
+ *
+ * $Id$
+ */
+public class G
+{
+ // Constants -----------------------------------------------------------------------------------
+
+ // Static --------------------------------------------------------------------------------------
+
+ // Attributes ----------------------------------------------------------------------------------
+
+ private long id;
+
+ private String data;
+
+ // A 1 <-> 1 G
+ private A a;
+
+ // G * <-> * H
+ private Set hs;
+
+ // Constructors --------------------------------------------------------------------------------
+
+ public G()
+ {
+ this(null);
+ }
+
+ public G(String data)
+ {
+ this.data = data;
+ hs = new HashSet();
+ }
+
+ // Public --------------------------------------------------------------------------------------
+
+ public String getData()
+ {
+ return data;
+ }
+
+ public void setData(String data)
+ {
+ this.data = data;
+ }
+
+ public A getA()
+ {
+ return a;
+ }
+
+ public void setA(A a)
+ {
+ this.a = a;
+ }
+
+ public Set getHs()
+ {
+ return hs;
+ }
+
+ public void setHs(Set s)
+ {
+ hs = s;
+ }
+
+ // Package protected ---------------------------------------------------------------------------
+
+ long getId()
+ {
+ return id;
+ }
+
+ // Protected -----------------------------------------------------------------------------------
+
+ // Private -------------------------------------------------------------------------------------
+
+ private void setId(long id)
+ {
+ this.id = id;
+ }
+
+ // Inner classes -------------------------------------------------------------------------------
+}
Property changes on: core/branches/Branch_3_2/test/org/hibernate/test/cascade/G.java
___________________________________________________________________
Name: svn:executable
+ *
Added: core/branches/Branch_3_2/test/org/hibernate/test/cascade/H.java
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/H.java (rev 0)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/H.java 2008-04-11 04:47:54 UTC (rev 14502)
@@ -0,0 +1,94 @@
+// $Id$
+
+
+package org.hibernate.test.cascade;
+
+import java.util.Set;
+import java.util.HashSet;
+
+/**
+ * @author <a href="mailto:ovidiu@feodorov.com">Ovidiu Feodorov</a>
+ *
+ * Copyright 2008 Ovidiu Feodorov
+ *
+ */
+public class H
+{
+ // Constants -----------------------------------------------------------------------------------
+
+ // Static --------------------------------------------------------------------------------------
+
+ // Attributes ----------------------------------------------------------------------------------
+
+ private long id;
+
+ private String data;
+
+ private A a;
+
+ // G * <-> * H
+ private Set gs;
+
+ // Constructors --------------------------------------------------------------------------------
+
+ public H()
+ {
+ this(null);
+ }
+
+ public H(String data)
+ {
+ this.data = data;
+ gs = new HashSet();
+ }
+
+ // Public --------------------------------------------------------------------------------------
+
+ public long getId()
+ {
+ return id;
+ }
+
+ public String getData()
+ {
+ return data;
+ }
+
+ public void setData(String data)
+ {
+ this.data = data;
+ }
+
+ public A getA()
+ {
+ return a;
+ }
+
+ public void setA(A a)
+ {
+ this.a = a;
+ }
+
+ public Set getGs()
+ {
+ return gs;
+ }
+
+ public void setGs(Set gs)
+ {
+ this.gs = gs;
+ }
+
+ // Package protected ---------------------------------------------------------------------------
+
+ // Protected -----------------------------------------------------------------------------------
+
+ // Private -------------------------------------------------------------------------------------
+
+ private void setId(long id)
+ {
+ this.id = id;
+ }
+
+ // Inner classes -------------------------------------------------------------------------------
+}
Property changes on: core/branches/Branch_3_2/test/org/hibernate/test/cascade/H.java
___________________________________________________________________
Name: svn:executable
+ *
Added: core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascade.hbm.xml
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascade.hbm.xml (rev 0)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascade.hbm.xml 2008-04-11 04:47:54 UTC (rev 14502)
@@ -0,0 +1,67 @@
+<?xml version="1.0"?>
+<!DOCTYPE hibernate-mapping SYSTEM "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd" >
+
+<hibernate-mapping package="org.hibernate.test.cascade">
+
+ <class name="A" table="HB_A">
+
+ <id name="id" type="long"><generator class="native"/></id>
+
+ <property name="data" type="string" not-null="true"/>
+
+ <!--
+ Associations
+ -->
+
+ <set name="hs" inverse="true" cascade="all">
+ <key column="a_fk"/>
+ <one-to-many class="H"/>
+ </set>
+ <one-to-one name="g" class="G" property-ref="a" cascade="all"/>
+
+ </class>
+
+ <class name="G" table="HB_G">
+
+ <id name="id" type="long"><generator class="native"/></id>
+
+ <property name="data" type="string" not-null="true"/>
+
+ <!--
+ Associations
+ -->
+
+ <set name="hs" inverse="true" table="HB_G_H" cascade="all">
+ <key column="g_fk"/>
+ <many-to-many class="H" column="h_fk"/>
+ </set>
+
+ <many-to-one name="a"
+ column="aId"
+ unique="true"
+ not-null="false"/>
+
+ </class>
+
+ <class name="H" table="HB_H">
+
+ <id name="id" type="long"><generator class="native"/></id>
+
+ <property name="data" type="string" not-null="true"/>
+
+ <!--
+ Associations
+ -->
+
+ <!-- *NOT* cascaded -->
+ <set name="gs" table="HB_G_H">
+ <key column="h_fk"/>
+ <many-to-many class="G" column="g_fk"/>
+ </set>
+
+ <many-to-one name="a" column="a_fk" class="A"/>
+
+ </class>
+
+
+</hibernate-mapping>
\ No newline at end of file
Added: core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascadeTest.java
===================================================================
--- core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascadeTest.java (rev 0)
+++ core/branches/Branch_3_2/test/org/hibernate/test/cascade/MultiPathCascadeTest.java 2008-04-11 04:47:54 UTC (rev 14502)
@@ -0,0 +1,160 @@
+//$Id: $
+
+package org.hibernate.test.cascade;
+
+import java.util.Collections;
+
+import junit.framework.Test;
+
+import org.hibernate.Session;
+import org.hibernate.Transaction;
+import org.hibernate.junit.functional.FunctionalTestCase;
+import org.hibernate.junit.functional.FunctionalTestClassTestSuite;
+
+/**
+ * @author <a href="mailto:ovidiu@feodorov.com">Ovidiu Feodorov</a>
+ * @author Gail Badner
+ *
+ */
+
+public class MultiPathCascadeTest extends FunctionalTestCase {
+
+ public MultiPathCascadeTest(String name) {
+ super( name );
+ }
+
+ public String[] getMappings() {
+ return new String[] {
+ "cascade/MultiPathCascade.hbm.xml"
+ };
+ }
+
+ public static Test suite() {
+ return new FunctionalTestClassTestSuite( MultiPathCascadeTest.class );
+ }
+
+ public void testMultiPathMergeDetachedFailureExpected() throws Exception
+ {
+ // persist a simple A in the database
+
+ Session s = openSession();
+ s.beginTransaction();
+ A a = new A();
+ a.setData("Anna");
+ s.save(a);
+ s.getTransaction().commit();
+ s.close();
+
+ // modify detached entity
+ modifyEntity( a );
+
+ s = openSession();
+ s.beginTransaction();
+ s.merge(a);
+ s.getTransaction().commit();
+ s.close();
+
+ verifyModifications( a.getId() );
+ }
+
+ public void testMultiPathUpdateDetached() throws Exception
+ {
+ // persist a simple A in the database
+
+ Session s = openSession();
+ s.beginTransaction();
+ A a = new A();
+ a.setData("Anna");
+ s.save(a);
+ s.getTransaction().commit();
+ s.close();
+
+ // modify detached entity
+ modifyEntity( a );
+
+ s = openSession();
+ s.beginTransaction();
+ s.update(a);
+ s.getTransaction().commit();
+ s.close();
+
+ verifyModifications( a.getId() );
+ }
+
+ public void testMultiPathGetAndModify() throws Exception
+ {
+ // persist a simple A in the database
+
+ Session s = openSession();
+ s.beginTransaction();
+ A a = new A();
+ a.setData("Anna");
+ s.save(a);
+ s.getTransaction().commit();
+ s.close();
+
+ s = openSession();
+ s.beginTransaction();
+ // retrieve the previously saved instance from the database, and update it
+ a = ( A ) s.get( A.class, new Long( a.getId() ) );
+ modifyEntity( a );
+ s.getTransaction().commit();
+ s.close();
+
+ verifyModifications( a.getId() );
+ }
+
+ private void modifyEntity(A a) {
+ // create a *circular* graph in detached entity
+ a.setData("Anthony");
+
+ G g = new G();
+ g.setData("Giovanni");
+
+ H h = new H();
+ h.setData("Hellen");
+
+ a.setG(g);
+ g.setA(a);
+
+ a.getHs().add(h);
+ h.setA(a);
+
+ g.getHs().add(h);
+ h.getGs().add(g);
+ }
+
+ private void verifyModifications(long aId) {
+ Session s = openSession();
+ s.beginTransaction();
+
+ // retrieve the A object and check it
+ A a = ( A ) s.get( A.class, new Long( aId ) );
+ assertEquals( aId, a.getId() );
+ assertEquals( "Anthony", a.getData() );
+ assertNotNull( a.getG() );
+ assertNotNull( a.getHs() );
+ assertEquals( 1, a.getHs().size() );
+
+ G gFromA = a.getG();
+ H hFromA = ( H ) a.getHs().iterator().next();
+
+ // check the G object
+ assertEquals( "Giovanni", gFromA.getData() );
+ assertSame( a, gFromA.getA() );
+ assertNotNull( gFromA.getHs() );
+ assertEquals( a.getHs(), gFromA.getHs() );
+ assertSame( hFromA, gFromA.getHs().iterator().next() );
+
+ // check the H object
+ assertEquals( "Hellen", hFromA.getData() );
+ assertSame( a, hFromA.getA() );
+ assertNotNull( hFromA.getGs() );
+ assertEquals( 1, hFromA.getGs().size() );
+ assertSame( gFromA, hFromA.getGs().iterator().next() );
+
+ s.getTransaction().commit();
+ s.close();
+ }
+
+}
\ No newline at end of file
16 years, 10 months
Hibernate SVN: r14501 - in search/trunk: src/java/org/hibernate/search/backend and 3 other directories.
by hibernate-commits@lists.jboss.org
Author: epbernard
Date: 2008-04-10 15:32:31 -0400 (Thu, 10 Apr 2008)
New Revision: 14501
Added:
search/trunk/src/java/org/hibernate/search/backend/configuration/
search/trunk/src/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java
search/trunk/src/test/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java
search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
search/trunk/src/test/org/hibernate/search/test/configuration/UselessShardingStrategy.java
Modified:
search/trunk/doc/reference/en/modules/configuration.xml
search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java
search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
search/trunk/src/test/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java
Log:
HSEARCH-176 support default in the lucene properties. Refactor the property support and fix various bugs (Sanne Grinovero)
Modified: search/trunk/doc/reference/en/modules/configuration.xml
===================================================================
--- search/trunk/doc/reference/en/modules/configuration.xml 2008-04-09 12:06:41 UTC (rev 14500)
+++ search/trunk/doc/reference/en/modules/configuration.xml 2008-04-10 19:32:31 UTC (rev 14501)
@@ -610,7 +610,12 @@
explicitly set, the value will default to the
<literal>.transaction</literal> property.</para>
- <para>For more information about Lucene indexing performances, please
+ <para>
+ The default for all values is to leave them at Lucene's own default,
+ so the listed values in the following table actually depend on the
+ version of Lucene you are using;
+ values shown are relative to version <literal>2.3</literal>.
+ For more information about Lucene indexing performances, please
refer to the Lucene documentation.</para>
<table>
@@ -664,7 +669,7 @@
<para>Used by Hibernate Search during index update operations as
part of database modifications.</para></entry>
- <entry>10</entry>
+ <entry>Disabled (not set)</entry>
</row>
<row>
@@ -716,7 +721,7 @@
<para>Used during indexing via
<literal>FullTextSession.index()</literal></para></entry>
- <entry>10</entry>
+ <entry>Disabled (not set)</entry>
</row>
<row>
Modified: search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java 2008-04-09 12:06:41 UTC (rev 14500)
+++ search/trunk/src/java/org/hibernate/search/backend/LuceneIndexingParameters.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -2,10 +2,14 @@
package org.hibernate.search.backend;
import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.index.IndexWriter;
+import org.hibernate.search.backend.configuration.IndexWriterSetting;
/**
* Wrapper class around the Lucene indexing parameters <i>mergeFactor</i>, <i>maxMergeDocs</i>,
@@ -16,25 +20,39 @@
*
* @author Hardy Ferentschik
* @author Sanne Grinovero
- *
*/
public class LuceneIndexingParameters implements Serializable {
private static final Log log = LogFactory.getLog( LuceneIndexingParameters.class );
+
+ // value keyword
+ public static final String EXPLICIT_DEFAULT_VALUE = "default";
+ // property path keywords
+ public static final String BATCH = "batch.";
+ public static final String TRANSACTION = "transaction.";
- private final ParameterSet transactionIndexParameters = new ParameterSet();
- private final ParameterSet batchIndexParameters = new ParameterSet();
+ private final ParameterSet transactionIndexParameters;
+ private final ParameterSet batchIndexParameters;
- /**
- * Constructor which instantiates new parameter objects with the the default values.
- */
- public LuceneIndexingParameters() {
- //FIXME: I would recommend setting the following parameters as defaults for batch indexing:
- //batchIndexParameters.setMaxBufferedDocs(null);
- //batchIndexParameters.setRamBufferSizeMB(64);
+ public LuceneIndexingParameters( Properties sourceProps ) {
+ Properties transactionProps = new Properties();
+ Properties batchProps = new Properties( transactionProps ); // transaction settings is the default for batch
+ //don't iterate on property entries we know all the keys:
+ for ( IndexWriterSetting t : IndexWriterSetting.values() ) {
+ String key = t.getKey();
+ String trxValue = sourceProps.getProperty( TRANSACTION + key );
+ if (trxValue != null) {
+ transactionProps.setProperty( key, trxValue );
+ }
+ String batchValue = sourceProps.getProperty( BATCH + key );
+ if (batchValue != null) {
+ batchProps.setProperty( key, batchValue );
+ }
+ }
+ transactionIndexParameters = new ParameterSet(transactionProps);
+ batchIndexParameters = new ParameterSet(batchProps);
+ }
- }
-
public ParameterSet getTransactionIndexParameters() {
return transactionIndexParameters;
}
@@ -44,65 +62,47 @@
}
public class ParameterSet implements Serializable {
-
- private Integer mergeFactor = null;
- private Integer maxMergeDocs = null;
- private Integer maxBufferedDocs = null;
- private Integer termIndexInterval = null;
- private Integer ramBufferSizeMB = null;
-
- public Integer getMergeFactor() {
- return mergeFactor;
+
+ final Map<IndexWriterSetting, Integer> parameters = new HashMap<IndexWriterSetting, Integer>();
+
+ public ParameterSet(Properties prop) {
+ for ( IndexWriterSetting t : IndexWriterSetting.values() ) {
+ String value = prop.getProperty( t.getKey() );
+ if ( ! (value==null || EXPLICIT_DEFAULT_VALUE.equals(value) ) ) {
+ parameters.put( t, t.parseVal(value) );
+ }
+ }
}
- public void setMergeFactor(Integer mergeFactor) {
- this.mergeFactor = mergeFactor;
- }
- public Integer getMaxMergeDocs() {
- return maxMergeDocs;
- }
- public void setMaxMergeDocs(Integer maxMergeDocs) {
- this.maxMergeDocs = maxMergeDocs;
- }
- public Integer getMaxBufferedDocs() {
- return maxBufferedDocs;
- }
- public void setMaxBufferedDocs(Integer maxBufferedDocs) {
- this.maxBufferedDocs = maxBufferedDocs;
- }
- public Integer getRamBufferSizeMB() {
- return ramBufferSizeMB;
- }
- public void setRamBufferSizeMB(Integer ramBufferSizeMB) {
- this.ramBufferSizeMB = ramBufferSizeMB;
- }
- public Integer getTermIndexInterval() {
- return termIndexInterval;
- }
- public void setTermIndexInterval(Integer termIndexInterval) {
- this.termIndexInterval = termIndexInterval;
- }
-
+
/**
* Applies the parameters represented by this to a writer.
* Undefined parameters are not set, leaving the lucene default.
* @param writer the IndexWriter whereto the parameters will be applied.
*/
- void applyToWriter(IndexWriter writer){
+ public void applyToWriter(IndexWriter writer) {
try {
- if (mergeFactor!=null)
- writer.setMergeFactor(mergeFactor);
- if (maxMergeDocs!=null)
- writer.setMaxMergeDocs(maxMergeDocs);
- if (maxBufferedDocs!=null)
- writer.setMaxBufferedDocs(maxBufferedDocs);
- if (ramBufferSizeMB!=null)
- writer.setRAMBufferSizeMB(ramBufferSizeMB);
- if (termIndexInterval!=null)
- writer.setTermIndexInterval(termIndexInterval);
- }catch (IllegalArgumentException e) {
- log.error("Illegal IndexWriter setting"+e.getMessage()+". Will use default settings!");
+ for ( Map.Entry<IndexWriterSetting,Integer> entry : parameters.entrySet() ) {
+ entry.getKey().applySetting( writer, entry.getValue() );
+ }
+ } catch (IllegalArgumentException e) {
+ //FIXME shouldn't we raise an exception instead
+ log.error( "Illegal IndexWriter setting" + e.getMessage()
+ + ". Will use default settings." );
}
}
+
+ public Integer getCurrentValueFor(IndexWriterSetting ws){
+ return parameters.get(ws);
+ }
+
+ public void setCurrentValueFor(IndexWriterSetting ws, Integer newValue){
+ if (newValue==null){
+ parameters.remove(ws);
+ } else {
+ parameters.put(ws, newValue);
+ }
+ }
}
+
}
Added: search/trunk/src/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/backend/configuration/IndexWriterSetting.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -0,0 +1,60 @@
+package org.hibernate.search.backend.configuration;
+
+import java.io.Serializable;
+
+import org.apache.lucene.index.IndexWriter;
+import org.hibernate.search.SearchException;
+
+public enum IndexWriterSetting implements Serializable {
+
+ MERGE_FACTOR( "merge_factor" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMergeFactor( value );
+ }
+ } ,
+ MAX_MERGE_DOCS( "max_merge_docs" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxMergeDocs( value );
+ }
+ } ,
+ MAX_BUFFERED_DOCS( "max_buffered_docs" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setMaxBufferedDocs( value );
+ }
+ } ,
+ RAM_BUFFER_SIZE( "ram_buffer_size" ) {
+ public void applySetting(IndexWriter writer, int value) {
+ writer.setRAMBufferSizeMB( value );
+ }
+ };
+
+ private final String cfgKey;
+
+ IndexWriterSetting(String configurationKey){
+ this.cfgKey = configurationKey;
+ }
+
+ /**
+ * @throws IllegalArgumentException when user selects an invalid value; should be wrapped.
+ */
+ public abstract void applySetting(IndexWriter writer, int value);
+
+ public String getKey() {
+ return cfgKey;
+ }
+
+ /**
+ * Specific parameters may override to provide additional keywords support.
+ * @param value the string value as in configuration file
+ * @return the integer value going to be set as parameter
+ * @throws SearchException for unrecognized values
+ */
+ public Integer parseVal(String value) {
+ try {
+ return Integer.valueOf( value );
+ } catch (NumberFormatException ne) {
+ throw new SearchException( "Invalid value for " + cfgKey + ": " + value );
+ }
+ }
+
+}
Modified: search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-04-09 12:06:41 UTC (rev 14500)
+++ search/trunk/src/java/org/hibernate/search/store/DirectoryProviderFactory.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -2,24 +2,25 @@
package org.hibernate.search.store;
import java.util.ArrayList;
+import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.locks.ReentrantLock;
import org.hibernate.HibernateException;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.cfg.Configuration;
+import org.hibernate.mapping.PersistentClass;
+import org.hibernate.search.SearchException;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.backend.LuceneIndexingParameters;
-import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.engine.SearchFactoryImplementor;
-import org.hibernate.search.store.optimization.OptimizerStrategy;
+import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.store.optimization.IncrementalOptimizerStrategy;
import org.hibernate.search.store.optimization.NoOpOptimizerStrategy;
-import org.hibernate.search.SearchException;
-import org.hibernate.mapping.PersistentClass;
-import org.hibernate.annotations.common.reflection.ReflectionManager;
-import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.search.store.optimization.OptimizerStrategy;
import org.hibernate.util.ReflectHelper;
import org.hibernate.util.StringHelper;
@@ -49,15 +50,6 @@
private static String LUCENE_DEFAULT = LUCENE_PREFIX + "default.";
private static String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName();
- // Lucene index performance parameters
- private static final String MERGE_FACTOR = "merge_factor";
- private static final String MAX_MERGE_DOCS = "max_merge_docs";
- private static final String MAX_BUFFERED_DOCS = "max_buffered_docs";
- private static final String RAM_BUFFER_SIZE = "ram_buffer_size";
-
- private static final String BATCH = "batch.";
- private static final String TRANSACTION = "transaction.";
-
private static final String SHARDING_STRATEGY = "sharding_strategy";
private static final String NBR_OF_SHARDS = SHARDING_STRATEGY + ".nbr_of_shards";
@@ -80,11 +72,15 @@
//define sharding strategy
IndexShardingStrategy shardingStrategy;
Properties shardingProperties = new Properties();
- for (Map.Entry entry : indexProps[0].entrySet()) {
- if ( ( (String) entry.getKey() ).startsWith( SHARDING_STRATEGY ) ) {
- shardingProperties.put( entry.getKey(), entry.getValue() );
+ //we use an enumeration to get the keys from defaultProperties as well
+ Enumeration<String> allProps = (Enumeration<String>) indexProps[0].propertyNames();
+ while ( allProps.hasMoreElements() ){
+ String key = allProps.nextElement();
+ if ( key.startsWith( SHARDING_STRATEGY ) ) {
+ shardingProperties.put( key, indexProps[0].getProperty( key ) );
}
}
+
String shardingStrategyName = shardingProperties.getProperty( SHARDING_STRATEGY );
if ( shardingStrategyName == null) {
if ( indexProps.length == 1 ) {
@@ -183,7 +179,7 @@
* If there are no matching properties in the configuration default values will be applied.
* <p>
* NOTE:</br>
- * If a non batch value is set in the configuration apply it also to the
+ * If a non batch value is set in the configuration apply it also to the
* batch mode. This covers the case where users only specify
* parameters for the non batch mode. In this case the same parameters apply for
* batch indexing.
@@ -194,179 +190,82 @@
* @param provider The directory provider for which to configure the indexing parameters.
*/
private void configureIndexingParameters(SearchFactoryImplementor searchFactoryImplementor, Properties indexProps, DirectoryProvider<?> provider) {
-
- LuceneIndexingParameters indexingParams = new LuceneIndexingParameters();
-
- {
- Integer val = getIntegerProperty(indexProps, TRANSACTION + MERGE_FACTOR);
- if (val!=null) {
- indexingParams.getTransactionIndexParameters().setMergeFactor(val);
- indexingParams.getBatchIndexParameters().setMergeFactor(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, TRANSACTION + MAX_MERGE_DOCS);
- if (val!=null) {
- indexingParams.getTransactionIndexParameters().setMaxMergeDocs(val);
- indexingParams.getBatchIndexParameters().setMaxMergeDocs(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, TRANSACTION + MAX_BUFFERED_DOCS);
- if (val!=null) {
- indexingParams.getTransactionIndexParameters().setMaxBufferedDocs(val);
- indexingParams.getBatchIndexParameters().setMaxBufferedDocs(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, TRANSACTION + RAM_BUFFER_SIZE);
- if (val!=null) {
- indexingParams.getTransactionIndexParameters().setRamBufferSizeMB(val);
- indexingParams.getBatchIndexParameters().setRamBufferSizeMB(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, BATCH + MERGE_FACTOR);
- if (val!=null) {
- indexingParams.getBatchIndexParameters().setMergeFactor(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, BATCH + MAX_MERGE_DOCS);
- if (val!=null) {
- indexingParams.getBatchIndexParameters().setMaxMergeDocs(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, BATCH + MAX_BUFFERED_DOCS);
- if (val!=null) {
- indexingParams.getBatchIndexParameters().setMaxBufferedDocs(val);
- }
- }
-
- {
- Integer val = getIntegerProperty(indexProps, BATCH + RAM_BUFFER_SIZE);
- if (val!=null) {
- indexingParams.getBatchIndexParameters().setRamBufferSizeMB(val);
- }
- }
-
- searchFactoryImplementor.addIndexingParmeters(provider, indexingParams);
+ LuceneIndexingParameters indexingParams = new LuceneIndexingParameters( indexProps );
+ searchFactoryImplementor.addIndexingParmeters( provider, indexingParams );
}
/**
- * @param indexProps The properties to look into for the value.
- * @param propertyName The value key.
- * @return null if the property is not defined, the parse value otherwise.
- * @throws SearchException if the property is defined but not in an integer format.
- */
- private Integer getIntegerProperty(Properties indexProps, String propertyName) {
- String propertyValue = indexProps.getProperty(propertyName);
- Integer i = null;
- if (StringHelper.isNotEmpty( propertyValue )) {
- try{
- i = Integer.valueOf(propertyValue);
- } catch (NumberFormatException ne) {
- throw new SearchException("Invalid value for " + propertyName + ": " + propertyValue);
- }
- }
- return i;
- }
-
- /**
* Returns an array of directory properties
- * Properties are defaulted. For a given proeprty name,
+ * Properties are defaulted. For a given property name,
* hibernate.search.indexname.n has priority over hibernate.search.indexname which has priority over hibernate.search
* If the Index is not sharded, a single Properties is returned
* If the index is sharded, the Properties index matches the shard index
- */
+ */
private static Properties[] getDirectoryProperties(Configuration cfg, String directoryProviderName) {
- Properties props = cfg.getProperties();
- String indexName = LUCENE_PREFIX + directoryProviderName;
- //indexSpecificProperties[i] >> indexSpecificDefaultproperties >> defaultProperties
- Properties defaultProperties = new Properties();
- ArrayList<Properties> indexSpecificProps = new ArrayList<Properties>();
- Properties indexSpecificDefaultProps = new Properties(defaultProperties);
- for ( Map.Entry entry : props.entrySet() ) {
- String key = (String) entry.getKey();
- if ( key.startsWith( LUCENE_DEFAULT ) ) {
- defaultProperties.setProperty( key.substring( LUCENE_DEFAULT.length() ), (String) entry.getValue() );
- }
- else if ( key.startsWith( indexName ) ) {
- String suffixedKey = key.substring( indexName.length() + 1 );
- int nextDoc = suffixedKey.indexOf( '.' );
- int index = -1;
- if ( nextDoc != -1 ) {
- String potentialNbr = suffixedKey.substring( 0, nextDoc );
- try {
- index = Integer.parseInt( potentialNbr );
+ Properties cfgAndImplicitProperties = new Properties();
+ // fcg has no defaults, so we may use keySet iteration
+ //FIXME not so sure about that cfg.setProperties()?
+ for ( Map.Entry entry : cfg.getProperties().entrySet() ) {
+ String key = entry.getKey().toString();// casting to String
+ if ( key.startsWith( LUCENE_PREFIX ) ) {
+ //put regular properties and add an explicit batch property when a transaction property is set
+ cfgAndImplicitProperties.put( key, entry.getValue() );
+ if ( key.contains( LuceneIndexingParameters.TRANSACTION ) ) {
+ //FIXME fix that transaction can appear in the index name
+ //I imagine checking the last '.transaction.' is safe.
+ String additionalKey = key.replaceFirst(LuceneIndexingParameters.TRANSACTION, LuceneIndexingParameters.BATCH);
+ if ( cfg.getProperty(additionalKey) == null ){
+ cfgAndImplicitProperties.put(additionalKey, cfg.getProperty(key) );
}
- catch ( Exception e ) {
- //just not a number
- index = -1;
- }
}
- if (index == -1) {
- indexSpecificDefaultProps.setProperty( suffixedKey, (String) entry.getValue() );
- }
- else {
- String finalKeyName = suffixedKey.substring( nextDoc + 1 );
- //ignore sharding strategy properties
- if ( ! finalKeyName.startsWith( SHARDING_STRATEGY ) ) {
- ensureListSize( indexSpecificProps, index + 1 );
- Properties propertiesforIndex = indexSpecificProps.get( index );
- if ( propertiesforIndex == null ) {
- propertiesforIndex = new Properties( indexSpecificDefaultProps );
- indexSpecificProps.set( index, propertiesforIndex );
- }
- propertiesforIndex.setProperty( finalKeyName, (String) entry.getValue() );
- }
- }
}
}
- String nbrOfShardsString = indexSpecificDefaultProps.getProperty( NBR_OF_SHARDS );
- int nbrOfShards = -1;
- if ( nbrOfShardsString != null ) {
- try {
- nbrOfShards = Integer.parseInt( nbrOfShardsString );
+ Properties globalProperties = new Properties();
+ Properties directoryLocalProperties = new Properties( globalProperties );
+ String directoryLocalPrefix = LUCENE_PREFIX + directoryProviderName + ".";
+ for ( Map.Entry entry : cfgAndImplicitProperties.entrySet() ) {
+ String key = entry.getKey().toString();// casting to String
+ if ( key.startsWith( LUCENE_DEFAULT ) ) {
+ globalProperties.put( key.substring( LUCENE_DEFAULT.length() ), entry.getValue() );
}
- catch (NumberFormatException e) {
- throw new SearchException(indexName + "." + NBR_OF_SHARDS + " is not a number", e);
+ else if ( key.startsWith( directoryLocalPrefix ) ) {
+ directoryLocalProperties.put( key.substring( directoryLocalPrefix.length() ),entry.getValue() );
}
}
- if ( nbrOfShards <= 0 && indexSpecificProps.size() == 0 ) {
- //no shard (a shareded subindex has to have at least one property
- return new Properties[] { indexSpecificDefaultProps };
- }
- else {
- //sharded
- nbrOfShards = nbrOfShards >= indexSpecificDefaultProps.size() ?
- nbrOfShards :
- indexSpecificDefaultProps.size();
- ensureListSize( indexSpecificProps, nbrOfShards );
- for ( int index = 0 ; index < nbrOfShards ; index++ ) {
- if ( indexSpecificProps.get( index ) == null ) {
- indexSpecificProps.set( index, new Properties( indexSpecificDefaultProps ) );
+ final String shardsCountValue = directoryLocalProperties.getProperty(NBR_OF_SHARDS);
+ if (shardsCountValue == null) {
+ // no shards: finished.
+ return new Properties[] { directoryLocalProperties };
+ } else {
+ // count shards
+ int shardsCount = -1;
+ {
+ try {
+ shardsCount = Integer.parseInt( shardsCountValue );
+ } catch (NumberFormatException e) {
+ if ( cfgAndImplicitProperties.getProperty(directoryLocalPrefix + NBR_OF_SHARDS ) != null)
+ throw new SearchException( shardsCountValue + " is not a number", e);
}
}
- return indexSpecificProps.toArray( new Properties[ indexSpecificProps.size() ] );
+ // create shard-specific Props
+ Properties[] shardLocalProperties = new Properties[shardsCount];
+ for ( int i = 0; i < shardsCount; i++ ) {
+ String currentShardPrefix = i + ".";
+ Properties currentProp = new Properties( directoryLocalProperties );
+ //Enumerations are ugly but otherwise we can't get the property defaults:
+ Enumeration<String> localProps = (Enumeration<String>) directoryLocalProperties.propertyNames();
+ while ( localProps.hasMoreElements() ){
+ String key = localProps.nextElement();
+ if ( key.startsWith( currentShardPrefix ) ) {
+ currentProp.setProperty( key.substring( currentShardPrefix.length() ), directoryLocalProperties.getProperty( key ) );
+ }
+ }
+ shardLocalProperties[i] = currentProp;
+ }
+ return shardLocalProperties;
}
}
- private static void ensureListSize(ArrayList<Properties> indexSpecificProps, int size) {
- //ensure the index exists
- indexSpecificProps.ensureCapacity( size );
- while ( indexSpecificProps.size() < size ) {
- indexSpecificProps.add(null);
- }
- }
-
private static String getDirectoryProviderName(XClass clazz, Configuration cfg) {
//yuk
ReflectionManager reflectionManager = SearchFactoryImpl.getReflectionManager(cfg);
Added: search/trunk/src/test/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java (rev 0)
+++ search/trunk/src/test/org/hibernate/search/test/configuration/ConfigurationReadTestCase.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -0,0 +1,73 @@
+package org.hibernate.search.test.configuration;
+
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.hibernate.search.backend.configuration.IndexWriterSetting;
+import org.hibernate.search.engine.SearchFactoryImplementor;
+import org.hibernate.search.impl.SearchFactoryImpl;
+import org.hibernate.search.test.SearchTestCase;
+
+/**
+ * Contains some utility methods to simplify coding of
+ * testcases about configuration parsing.
+ *
+ * @author Sanne Grinovero
+ */
+public abstract class ConfigurationReadTestCase extends SearchTestCase {
+
+ private SearchFactoryImplementor searchFactory;
+
+ protected enum TransactionType {
+ TRANSACTION, BATCH
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
+ searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
+ fullTextSession.close();
+ }
+
+ protected final void assertValueIsDefault(Class testEntity, TransactionType parmGroup, IndexWriterSetting setting) {
+ assertValueIsDefault( testEntity, 0, parmGroup, setting );
+ }
+
+ protected final void assertValueIsDefault(Class testEntity, int shard, TransactionType parmGroup, IndexWriterSetting setting) {
+ boolean batch = isBatch( parmGroup );
+ assertNull( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey() + " : value was expected unset!",
+ getParameter( shard, batch, setting, testEntity ) );
+ }
+
+ protected final void assertValueIsSet(Class testEntity, TransactionType parmGroup, IndexWriterSetting setting, int expectedValue) {
+ assertValueIsSet( testEntity, 0, parmGroup, setting, expectedValue );
+ }
+
+ protected final void assertValueIsSet(Class testEntity, int shard, TransactionType parmGroup, IndexWriterSetting setting, int expectedValue) {
+ boolean batch = isBatch( parmGroup );
+ assertNotNull( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey(),
+ getParameter( shard, batch, setting, testEntity ) );
+ assertEquals( "shard:" + shard + " batch=" + batch + " setting:" + setting.getKey(), expectedValue,
+ (int) getParameter( shard, batch, setting, testEntity ) );
+ }
+
+ protected final SearchFactoryImplementor getSearchFactory() {
+ return searchFactory;
+ }
+
+ private boolean isBatch(TransactionType parmGroup) {
+ return parmGroup == TransactionType.BATCH;
+ }
+
+ private Integer getParameter(int shard, boolean batch, IndexWriterSetting setting, Class testEntity) {
+ if ( batch ) {
+ return searchFactory.getIndexingParameters( searchFactory.getDirectoryProviders( testEntity )[shard] )
+ .getBatchIndexParameters().getCurrentValueFor( setting );
+ }
+ else {
+ return searchFactory.getIndexingParameters( searchFactory.getDirectoryProviders( testEntity )[shard] )
+ .getTransactionIndexParameters().getCurrentValueFor( setting );
+ }
+ }
+
+}
Modified: search/trunk/src/test/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java 2008-04-09 12:06:41 UTC (rev 14500)
+++ search/trunk/src/test/org/hibernate/search/test/configuration/LuceneIndexingParametersTest.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -1,34 +1,31 @@
package org.hibernate.search.test.configuration;
-import org.hibernate.search.FullTextSession;
-import org.hibernate.search.Search;
-import org.hibernate.search.backend.LuceneIndexingParameters;
-import org.hibernate.search.impl.SearchFactoryImpl;
import org.hibernate.search.test.Document;
-import org.hibernate.search.test.SearchTestCase;
import org.hibernate.search.test.query.Author;
import org.hibernate.search.test.query.Book;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_BUFFERED_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_MERGE_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MERGE_FACTOR;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.RAM_BUFFER_SIZE;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.TRANSACTION;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.BATCH;
/**
* @author Sanne Grinovero
*/
-public class LuceneIndexingParametersTest extends SearchTestCase {
+public class LuceneIndexingParametersTest extends ConfigurationReadTestCase {
protected void configure(org.hibernate.cfg.Configuration cfg) {
super.configure( cfg );
- //super sets:
- //cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
- //cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
-
cfg.setProperty( "hibernate.search.default.batch.ram_buffer_size", "1" );
+//set by super : cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
cfg.setProperty( "hibernate.search.default.transaction.ram_buffer_size", "2" );
cfg.setProperty( "hibernate.search.default.transaction.max_merge_docs", "9" );
- cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "10" );
+//set by super : cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
cfg.setProperty( "hibernate.search.default.transaction.max_buffered_docs", "11" );
- cfg.setProperty( "hibernate.search.Book.batch.ram_buffer_size", "3" );
cfg.setProperty( "hibernate.search.Book.batch.max_merge_docs", "12" );
cfg.setProperty( "hibernate.search.Book.batch.merge_factor", "13" );
cfg.setProperty( "hibernate.search.Book.batch.max_buffered_docs", "14" );
@@ -38,63 +35,55 @@
cfg.setProperty( "hibernate.search.Book.transaction.merge_factor", "16" );
cfg.setProperty( "hibernate.search.Book.transaction.max_buffered_docs", "17" );
- cfg.setProperty( "hibernate.search.Documents.ram_buffer_size", "4" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.ram_buffer_size", "default" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_merge_docs", "5" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.merge_factor", "6" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_buffered_docs", "7" );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_merge_docs", "9" );
}
- public void testUnsetBatchValueTakesTransaction() throws Exception {
- FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
- SearchFactoryImpl searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
- LuceneIndexingParameters indexingParameters = searchFactory.getIndexingParameters(searchFactory.getDirectoryProviders(Document.class)[0]);
- assertEquals(10, (int)indexingParameters.getBatchIndexParameters().getMergeFactor());
- assertEquals(1000, (int)indexingParameters.getBatchIndexParameters().getMaxBufferedDocs());
- fullTextSession.close();
+ public void testDefaultIndexProviderParameters() throws Exception {
+ assertValueIsSet( Author.class, TRANSACTION, RAM_BUFFER_SIZE, 2 );
+ assertValueIsSet( Author.class, TRANSACTION, MAX_MERGE_DOCS, 9 );
+ assertValueIsSet( Author.class, TRANSACTION, MAX_BUFFERED_DOCS, 11 );
+ assertValueIsSet( Author.class, TRANSACTION, MERGE_FACTOR, 100 );
}
- public void testBatchParametersDefault() throws Exception {
- FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
- SearchFactoryImpl searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
- LuceneIndexingParameters indexingParameters = searchFactory.getIndexingParameters(searchFactory.getDirectoryProviders(Author.class)[0]);
- assertEquals(1, (int)indexingParameters.getBatchIndexParameters().getRamBufferSizeMB());
- assertEquals(9, (int)indexingParameters.getBatchIndexParameters().getMaxMergeDocs());
- assertEquals(1000, (int)indexingParameters.getBatchIndexParameters().getMaxBufferedDocs());
- assertEquals(10, (int)indexingParameters.getBatchIndexParameters().getMergeFactor());
- fullTextSession.close();
+ public void testBatchParametersGlobals() throws Exception {
+ assertValueIsSet( Author.class, BATCH, RAM_BUFFER_SIZE, 1 );
+ assertValueIsSet( Author.class, BATCH, MAX_MERGE_DOCS, 9 );
+ assertValueIsSet( Author.class, BATCH, MAX_BUFFERED_DOCS, 1000 );
+ assertValueIsSet( Author.class, BATCH, MERGE_FACTOR, 100 );
}
- public void testTransactionParametersDefault() throws Exception {
- FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
- SearchFactoryImpl searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
- LuceneIndexingParameters indexingParameters = searchFactory.getIndexingParameters(searchFactory.getDirectoryProviders(Author.class)[0]);
- assertEquals(2, (int)indexingParameters.getTransactionIndexParameters().getRamBufferSizeMB());
- assertEquals(9, (int)indexingParameters.getTransactionIndexParameters().getMaxMergeDocs());
- assertEquals(11, (int)indexingParameters.getTransactionIndexParameters().getMaxBufferedDocs());
- assertEquals(10, (int)indexingParameters.getTransactionIndexParameters().getMergeFactor());
- fullTextSession.close();
+ public void testUnsetBatchValueTakesTransaction() throws Exception {
+ assertValueIsSet( Document.class, BATCH, MERGE_FACTOR, 6 );
+ assertValueIsSet( Document.class, BATCH, MAX_BUFFERED_DOCS, 7 );
}
- public void testBatchParameters() throws Exception {
- FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
- SearchFactoryImpl searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
- LuceneIndexingParameters indexingParameters = searchFactory.getIndexingParameters(searchFactory.getDirectoryProviders(Book.class)[0]);
- assertEquals(3, (int)indexingParameters.getBatchIndexParameters().getRamBufferSizeMB());
- assertEquals(12, (int)indexingParameters.getBatchIndexParameters().getMaxMergeDocs());
- assertEquals(14, (int)indexingParameters.getBatchIndexParameters().getMaxBufferedDocs());
- assertEquals(13, (int)indexingParameters.getBatchIndexParameters().getMergeFactor());
- fullTextSession.close();
+ public void testExplicitBatchParameters() throws Exception {
+ assertValueIsSet( Book.class, BATCH, MAX_MERGE_DOCS, 12 );
+ assertValueIsSet( Book.class, BATCH, MAX_BUFFERED_DOCS, 14 );
+ assertValueIsSet( Book.class, BATCH, MERGE_FACTOR, 13 );
}
+ public void testInheritedBatchParametersFromTranscation() throws Exception {
+ assertValueIsSet( Book.class, BATCH, RAM_BUFFER_SIZE, 4 );
+ }
+
public void testTransactionParameters() throws Exception {
- FullTextSession fullTextSession = Search.createFullTextSession( openSession() );
- SearchFactoryImpl searchFactory = (SearchFactoryImpl) fullTextSession.getSearchFactory();
- LuceneIndexingParameters indexingParameters = searchFactory.getIndexingParameters(searchFactory.getDirectoryProviders(Book.class)[0]);
- assertEquals(4, (int)indexingParameters.getTransactionIndexParameters().getRamBufferSizeMB());
- assertEquals(15, (int)indexingParameters.getTransactionIndexParameters().getMaxMergeDocs());
- assertEquals(17, (int)indexingParameters.getTransactionIndexParameters().getMaxBufferedDocs());
- assertEquals(16, (int)indexingParameters.getTransactionIndexParameters().getMergeFactor());
- fullTextSession.close();
+ assertValueIsSet( Book.class, TRANSACTION, RAM_BUFFER_SIZE, 4 );
+ assertValueIsSet( Book.class, TRANSACTION, MAX_MERGE_DOCS, 15 );
+ assertValueIsSet( Book.class, TRANSACTION, MAX_BUFFERED_DOCS, 17 );
+ assertValueIsSet( Book.class, TRANSACTION, MERGE_FACTOR, 16 );
}
-
+
+ public void testDefaultKeywordOverwritesInherited() throws Exception {
+ assertValueIsDefault( Document.class, TRANSACTION, RAM_BUFFER_SIZE );
+ assertValueIsDefault( Document.class, TRANSACTION, RAM_BUFFER_SIZE );
+ }
+
protected Class[] getMappings() {
return new Class[] {
Book.class,
@@ -102,5 +91,5 @@
Document.class
};
}
-
+
}
Added: search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java (rev 0)
+++ search/trunk/src/test/org/hibernate/search/test/configuration/ShardsConfigurationTest.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -0,0 +1,92 @@
+package org.hibernate.search.test.configuration;
+
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_BUFFERED_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MAX_MERGE_DOCS;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.MERGE_FACTOR;
+import static org.hibernate.search.backend.configuration.IndexWriterSetting.RAM_BUFFER_SIZE;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.TRANSACTION;
+import static org.hibernate.search.test.configuration.ConfigurationReadTestCase.TransactionType.BATCH;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.FSDirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+import org.hibernate.search.store.RAMDirectoryProvider;
+import org.hibernate.search.test.Document;
+import org.hibernate.search.test.query.Author;
+import org.hibernate.search.test.query.Book;
+
+/**
+ * @author Sanne Grinovero
+ */
+public class ShardsConfigurationTest extends ConfigurationReadTestCase {
+
+ protected void configure(org.hibernate.cfg.Configuration cfg) {
+ super.configure( cfg );
+ //super contains these:
+// cfg.setProperty( "hibernate.search.default.transaction.merge_factor", "100" );
+// cfg.setProperty( "hibernate.search.default.batch.max_buffered_docs", "1000" );
+ cfg.setProperty( "hibernate.search.default.sharding_strategy.nbr_of_shards", "2" );// permit this?
+ cfg.setProperty( "hibernate.search.default.directory_provider", FSDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.default.2.directory_provider", RAMDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_buffered_docs", "4" );
+ cfg.setProperty( "hibernate.search.Documents.batch.max_merge_docs", "5" );
+ cfg.setProperty( "hibernate.search.Documents.transaction.max_buffered_docs", "6" );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy.nbr_of_shards", "4" );
+ cfg.setProperty( "hibernate.search.Documents.sharding_strategy", UselessShardingStrategy.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.0.batch.max_merge_docs", "57" );
+ cfg.setProperty( "hibernate.search.Documents.0.directory_provider", RAMDirectoryProvider.class.getCanonicalName() );
+ cfg.setProperty( "hibernate.search.Documents.0.transaction.max_buffered_docs", "58" );
+ cfg.setProperty( "hibernate.search.Documents.1.batch.max_merge_docs", "11" );
+ cfg.setProperty( "hibernate.search.Documents.1.transaction.max_buffered_docs", "12" );
+ }
+
+ public void testCorrectNumberOfShardsDetected() throws Exception {
+ DirectoryProvider[] docDirProviders = getSearchFactory().getDirectoryProviders(Document.class);
+ assertNotNull(docDirProviders);
+ assertEquals(4, docDirProviders.length);
+ DirectoryProvider[] bookDirProviders = getSearchFactory().getDirectoryProviders(Book.class);
+ assertNotNull(bookDirProviders);
+ assertEquals(2, bookDirProviders.length);
+ }
+
+ public void testSelectionOfShardingStrategy() throws Exception {
+ IndexShardingStrategy shardingStrategy = getSearchFactory().getDocumentBuilders().get(Document.class).getDirectoryProviderSelectionStrategy();
+ assertNotNull( shardingStrategy );
+ assertEquals( shardingStrategy.getClass(), UselessShardingStrategy.class );
+ }
+
+ public void testShardingSettingsInherited() throws Exception {
+ DirectoryProvider[] docDirProviders = getSearchFactory().getDirectoryProviders(Document.class);
+ assertTrue( docDirProviders[0] instanceof RAMDirectoryProvider );
+ assertTrue( docDirProviders[1] instanceof FSDirectoryProvider );
+ assertTrue( docDirProviders[2] instanceof RAMDirectoryProvider );
+ }
+
+ public void testShardN2UsesDefaults() throws Exception {
+ assertValueIsSet( Document.class, 2, TRANSACTION, MAX_BUFFERED_DOCS, 6);
+ assertValueIsDefault( Document.class, 2, TRANSACTION, MAX_MERGE_DOCS );
+ assertValueIsSet( Document.class, 2, TRANSACTION, MERGE_FACTOR, 100 );
+ assertValueIsDefault( Document.class, 2, TRANSACTION, RAM_BUFFER_SIZE );
+ assertValueIsSet( Document.class, 2, BATCH, MAX_BUFFERED_DOCS, 4 );
+ assertValueIsSet( Document.class, 2, BATCH, MAX_MERGE_DOCS, 5 );
+ assertValueIsSet( Document.class, 2, BATCH, MERGE_FACTOR, 100 );
+ assertValueIsDefault( Document.class, 2, BATCH, RAM_BUFFER_SIZE );
+ }
+
+ public void testShardN1_ExplicitParams() throws Exception {
+ assertValueIsSet( Document.class, 1, TRANSACTION, MAX_BUFFERED_DOCS, 12 );
+ assertValueIsSet( Document.class, 1, BATCH, MAX_MERGE_DOCS, 11 );
+ }
+
+ public void testShard_BatchInheritedFromTransaction() throws Exception {
+ assertValueIsSet( Document.class, 1, BATCH, MAX_BUFFERED_DOCS, 12 );
+ assertValueIsSet( Document.class, 0, BATCH, MAX_BUFFERED_DOCS, 58 );
+ }
+
+ protected Class[] getMappings() {
+ return new Class[] {
+ Book.class,
+ Author.class,
+ Document.class
+ };
+ }
+}
Added: search/trunk/src/test/org/hibernate/search/test/configuration/UselessShardingStrategy.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/configuration/UselessShardingStrategy.java (rev 0)
+++ search/trunk/src/test/org/hibernate/search/test/configuration/UselessShardingStrategy.java 2008-04-10 19:32:31 UTC (rev 14501)
@@ -0,0 +1,31 @@
+package org.hibernate.search.test.configuration;
+
+import java.io.Serializable;
+import java.util.Properties;
+
+import org.apache.lucene.document.Document;
+import org.hibernate.search.store.DirectoryProvider;
+import org.hibernate.search.store.IndexShardingStrategy;
+
+/**
+ * Used to test the configuration of a third-party strategy
+ * @author Sanne Grinovero
+ */
+public class UselessShardingStrategy implements IndexShardingStrategy {
+
+ public DirectoryProvider getDirectoryProviderForAddition(Class entity, Serializable id, String idInString, Document document) {
+ return null;
+ }
+
+ public DirectoryProvider[] getDirectoryProvidersForAllShards() {
+ return null;
+ }
+
+ public DirectoryProvider[] getDirectoryProvidersForDeletion(Class entity, Serializable id, String idInString) {
+ return null;
+ }
+
+ public void initialize(Properties properties, DirectoryProvider[] providers) {
+ }
+
+}
16 years, 10 months
Hibernate SVN: r14500 - core/tags.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-04-09 08:06:41 -0400 (Wed, 09 Apr 2008)
New Revision: 14500
Removed:
core/tags/hibernate-3.3.0.CR1/
Log:
cleanup another bad release
16 years, 10 months
Hibernate SVN: r14499 - core/trunk/cache-jbosscache2.
by hibernate-commits@lists.jboss.org
Author: bstansberry(a)jboss.com
Date: 2008-04-07 22:35:25 -0400 (Mon, 07 Apr 2008)
New Revision: 14499
Modified:
core/trunk/cache-jbosscache2/pom.xml
Log:
[HHH-3141] Move to JBC 2.1.1.CR1.
Modified: core/trunk/cache-jbosscache2/pom.xml
===================================================================
--- core/trunk/cache-jbosscache2/pom.xml 2008-04-08 02:33:26 UTC (rev 14498)
+++ core/trunk/cache-jbosscache2/pom.xml 2008-04-08 02:35:25 UTC (rev 14499)
@@ -25,7 +25,7 @@
<dependency>
<groupId>org.jboss.cache</groupId>
<artifactId>jbosscache-core</artifactId>
- <version>2.1.1-SNAPSHOT</version>
+ <version>2.1.1.CR1</version>
</dependency>
<!-- test dependencies -->
16 years, 10 months