Hibernate SVN: r15705 - in validator/trunk: hibernate-validator and 13 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-12-18 11:21:24 -0500 (Thu, 18 Dec 2008)
New Revision: 15705
Added:
validator/trunk/tck-utils/
validator/trunk/tck-utils/pom.xml
validator/trunk/tck-utils/src/
validator/trunk/tck-utils/src/main/
validator/trunk/tck-utils/src/main/java/
validator/trunk/tck-utils/src/main/java/org/
validator/trunk/tck-utils/src/main/java/org/hibernate/
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessor.java
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessorFactory.java
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecAssertion.java
validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecVersion.java
validator/trunk/tck-utils/src/main/resources/
validator/trunk/tck-utils/src/main/resources/META-INF/
validator/trunk/tck-utils/src/main/resources/META-INF/services/
validator/trunk/tck-utils/src/main/resources/META-INF/services/com.sun.mirror.apt.AnnotationProcessorFactory
validator/trunk/tck-utils/src/test/
validator/trunk/tck-utils/src/test/java/
Modified:
validator/trunk/hibernate-validator/pom.xml
validator/trunk/hibernate-validator/src/test/java/org/hibernate/validation/engine/ValidatorImplTest.java
validator/trunk/pom.xml
Log:
added an apt based tool to process TCK related annotations.
Modified: validator/trunk/hibernate-validator/pom.xml
===================================================================
--- validator/trunk/hibernate-validator/pom.xml 2008-12-18 13:24:48 UTC (rev 15704)
+++ validator/trunk/hibernate-validator/pom.xml 2008-12-18 16:21:24 UTC (rev 15705)
@@ -38,6 +38,11 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>tck-utils</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<resources>
@@ -46,6 +51,22 @@
<filtering>true</filtering>
</resource>
</resources>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>apt-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-process</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <testOutputDirectory>${project.build.directory}/site</testOutputDirectory>
+ </configuration>
+ </plugin>
+ </plugins>
</build>
<reporting>
<plugins>
@@ -80,9 +101,9 @@
<additionalparam>
-d ${project.build.directory}/site
</additionalparam>
- <!-- Other dir than apidocs -->
+ <!--Other dir than apidocs-->
<destDir>tck</destDir>
- <!-- For the project-reports page-->
+ <!--For the project-reports page-->
<name>JSR Tests</name>
<description>Cross references unit tests to JSR 303 specification.</description>
</configuration>
Modified: validator/trunk/hibernate-validator/src/test/java/org/hibernate/validation/engine/ValidatorImplTest.java
===================================================================
--- validator/trunk/hibernate-validator/src/test/java/org/hibernate/validation/engine/ValidatorImplTest.java 2008-12-18 13:24:48 UTC (rev 15704)
+++ validator/trunk/hibernate-validator/src/test/java/org/hibernate/validation/engine/ValidatorImplTest.java 2008-12-18 16:21:24 UTC (rev 15705)
@@ -47,6 +47,7 @@
import org.hibernate.validation.eg.Last;
import org.hibernate.validation.eg.DefaultAlias;
import org.hibernate.validation.HibernateValidatorFactoryBuilder;
+import org.hibernate.tck.annotations.SpecAssertion;
/**
* Tests for the implementation of <code>Validator</code>.
@@ -73,6 +74,7 @@
* JSR 303: Requirements on classes to be validates (3.1)
* @jsr 3.1
*/
+ @SpecAssertion( section = {"3.1"} )
@Test
public void testWrongMethodName() {
try {
Modified: validator/trunk/pom.xml
===================================================================
--- validator/trunk/pom.xml 2008-12-18 13:24:48 UTC (rev 15704)
+++ validator/trunk/pom.xml 2008-12-18 16:21:24 UTC (rev 15705)
@@ -49,6 +49,7 @@
<module>validation-api</module>
<module>hibernate-validator</module>
<module>hibernate-validator-legacy</module>
+ <module>tck-utils</module>
</modules>
<dependencyManagement>
@@ -78,6 +79,11 @@
<artifactId>junit</artifactId>
<version>4.4</version>
</dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>tck-utils</artifactId>
+ <version>${version}</version>
+ </dependency>
</dependencies>
</dependencyManagement>
Property changes on: validator/trunk/tck-utils
___________________________________________________________________
Name: svn:ignore
+ target
*.iml
Added: validator/trunk/tck-utils/pom.xml
===================================================================
--- validator/trunk/tck-utils/pom.xml (rev 0)
+++ validator/trunk/tck-utils/pom.xml 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-validator-parent</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <groupId>org.hibernate</groupId>
+ <artifactId>tck-utils</artifactId>
+ <packaging>jar</packaging>
+ <name>TCK Utils</name>
+
+ <distributionManagement>
+ <site>
+ <id>site</id>
+ <url>file:///Users/hardy/Sites/${artifactId}</url>
+ </site>
+ </distributionManagement>
+</project>
Added: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessor.java
===================================================================
--- validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessor.java (rev 0)
+++ validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessor.java 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1,177 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.tck;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.sun.mirror.apt.AnnotationProcessor;
+import com.sun.mirror.apt.AnnotationProcessorEnvironment;
+import com.sun.mirror.declaration.AnnotationTypeDeclaration;
+import com.sun.mirror.declaration.Declaration;
+import com.sun.mirror.declaration.MethodDeclaration;
+import static com.sun.mirror.util.DeclarationVisitors.NO_OP;
+import static com.sun.mirror.util.DeclarationVisitors.getDeclarationScanner;
+import com.sun.mirror.util.SimpleDeclarationVisitor;
+
+import org.hibernate.tck.annotations.SpecAssertion;
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class TCKAnnotationProcessor implements AnnotationProcessor {
+
+ private static final String OUTDIR_OPTION_NAME = "-s";
+ private static final String REPORT_FILE_NAME = "tck.html";
+
+ private final AnnotationProcessorEnvironment env;
+ private final String[] tableHeaders = new String[] { "Section", "Class", "Method" };
+ private final StringBuffer out = new StringBuffer();
+ private final List<JSRReference> references = new ArrayList<JSRReference>();
+ private final File baseDir;
+
+ public TCKAnnotationProcessor(AnnotationProcessorEnvironment annotationProcessorEnvironment) {
+ this.env = annotationProcessorEnvironment;
+ String baseDirName = env.getOptions().get( OUTDIR_OPTION_NAME );
+ baseDir = new File( baseDirName );
+ baseDir.mkdirs();
+ }
+
+ public void process() {
+
+
+ AnnotationTypeDeclaration annType = ( AnnotationTypeDeclaration ) env.getTypeDeclaration(
+ SpecAssertion.class.getCanonicalName()
+ );
+ for ( Declaration d : env.getDeclarationsAnnotatedWith( annType ) ) {
+ d.accept(
+ getDeclarationScanner(
+ new DoNothingVisitor(),
+ NO_OP
+ )
+ );
+ }
+
+
+ writeHeader();
+ writeContents();
+ writeFooter();
+
+ writeReporttoFile();
+ }
+
+ private void writeReporttoFile() {
+ try {
+ File report = new File( baseDir, REPORT_FILE_NAME );
+ BufferedWriter writer = new BufferedWriter( new FileWriter( report ) );
+ writer.write( out.toString() );
+ writer.close();
+ }
+ catch ( IOException e ) {
+ System.err.println( "Error writing report." );
+ }
+ }
+
+ private void writeFooter() {
+ out.append( "</body></html>" );
+ }
+
+ private void writeHeader() {
+ out.append( "<html><head></head><body>" );
+ }
+
+ private void writeTableHeader() {
+ out.append( "<table border=\"1\"><tr>" );
+ for ( String s : tableHeaders ) {
+ out.append( "<th>" ).append( s ).append( "</th>" );
+ }
+ out.append( "</tr>" );
+ }
+
+ private void writeTableFooter() {
+ out.append( "</table>" );
+ }
+
+ private void writeContents() {
+ writeTableHeader();
+ for ( JSRReference reference : references ) {
+ out.append( "<tr>" );
+ out.append( "<td>" ).append( reference.jsrSectionReference ).append( "</td>" );
+ out.append( "<td><a href=\"" )
+ .append( reference.getSourceLink() )
+ .append( "\">" )
+ .append( reference.className )
+ .append( "</a></td>" );
+ out.append( "<td>" ).append( reference.methodName ).append( "</td>" );
+ out.append( "</tr>" );
+ }
+ writeTableFooter();
+ }
+
+ private class DoNothingVisitor extends SimpleDeclarationVisitor {
+ public void visitMethodDeclaration(MethodDeclaration d) {
+ SpecAssertion annotation = d.getAnnotation( SpecAssertion.class );
+ JSRReference ref = new JSRReference(
+ annotation.section()[0], d.getDeclaringType().getQualifiedName(), d.getSimpleName()
+ );
+ references.add( ref );
+ }
+ }
+
+ private static class JSRReference implements Comparable {
+ /**
+ * The JSR section this instance references.
+ */
+ String jsrSectionReference;
+
+ /**
+ * The name of the class which references the JSR.
+ */
+ String className;
+
+ /**
+ * The method which references the JSR.
+ */
+ String methodName;
+
+ /**
+ * @todo Add some validation
+ */
+ JSRReference(String reference, String className, String methodName) {
+ this.jsrSectionReference = reference;
+ this.className = className;
+ this.methodName = methodName;
+ }
+
+ public String getSourceLink() {
+ StringBuilder builder = new StringBuilder();
+ builder.append( "xref-test/" );
+ builder.append( className.replace( '.', '/' ) );
+ builder.append( ".html" );
+ return builder.toString();
+ }
+
+ public int compareTo(Object o) {
+ return jsrSectionReference.compareTo( ( ( JSRReference ) o ).jsrSectionReference );
+ }
+ }
+}
Property changes on: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessor.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessorFactory.java
===================================================================
--- validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessorFactory.java (rev 0)
+++ validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessorFactory.java 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1,65 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.tck;
+
+import java.util.Arrays;
+import java.util.Collection;
+import static java.util.Collections.emptySet;
+import static java.util.Collections.unmodifiableCollection;
+import java.util.Set;
+
+import com.sun.mirror.apt.AnnotationProcessor;
+import com.sun.mirror.apt.AnnotationProcessorEnvironment;
+import com.sun.mirror.apt.AnnotationProcessorFactory;
+import com.sun.mirror.declaration.AnnotationTypeDeclaration;
+
+import org.hibernate.tck.annotations.SpecAssertion;
+import org.hibernate.tck.annotations.SpecVersion;
+
+
+/**
+ * @author Hardy Ferentschik
+ */
+public class TCKAnnotationProcessorFactory implements AnnotationProcessorFactory {
+
+ // Process any set of annotations
+ private static final Collection<String> supportedAnnotations
+ = unmodifiableCollection(
+ Arrays.asList(
+ SpecAssertion.class.getCanonicalName(),
+ SpecVersion.class.getCanonicalName()
+ )
+ );
+
+ // No supported options
+ private static final Collection<String> supportedOptions = emptySet();
+
+
+ public Collection<String> supportedOptions() {
+ return supportedOptions;
+ }
+
+ public Collection<String> supportedAnnotationTypes() {
+ return supportedAnnotations;
+ }
+
+ public AnnotationProcessor getProcessorFor(Set<AnnotationTypeDeclaration> annotationTypeDeclarations, AnnotationProcessorEnvironment annotationProcessorEnvironment) {
+ return new TCKAnnotationProcessor( annotationProcessorEnvironment );
+
+ }
+}
Property changes on: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/TCKAnnotationProcessorFactory.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecAssertion.java
===================================================================
--- validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecAssertion.java (rev 0)
+++ validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecAssertion.java 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1,33 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.tck.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Documented
+public @interface SpecAssertion {
+
+ public String[] section();
+
+ public String note() default "";
+
+}
+
Property changes on: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecAssertion.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecVersion.java
===================================================================
--- validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecVersion.java (rev 0)
+++ validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecVersion.java 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1,30 @@
+// $Id$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.tck.annotations;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+@Documented
+@Target(ElementType.TYPE)
+public @interface SpecVersion {
+
+ String value();
+}
+
Property changes on: validator/trunk/tck-utils/src/main/java/org/hibernate/tck/annotations/SpecVersion.java
___________________________________________________________________
Name: svn:keywords
+ Id
Added: validator/trunk/tck-utils/src/main/resources/META-INF/services/com.sun.mirror.apt.AnnotationProcessorFactory
===================================================================
--- validator/trunk/tck-utils/src/main/resources/META-INF/services/com.sun.mirror.apt.AnnotationProcessorFactory (rev 0)
+++ validator/trunk/tck-utils/src/main/resources/META-INF/services/com.sun.mirror.apt.AnnotationProcessorFactory 2008-12-18 16:21:24 UTC (rev 15705)
@@ -0,0 +1 @@
+org.hibernate.tck.TCKAnnotationProcessorFactory
\ No newline at end of file
17 years, 4 months
Hibernate SVN: r15704 - in search/trunk/src/java/org/hibernate/search/backend: impl/lucene and 1 other directories.
by hibernate-commits@lists.jboss.org
Author: sannegrinovero
Date: 2008-12-18 08:24:48 -0500 (Thu, 18 Dec 2008)
New Revision: 15704
Modified:
search/trunk/src/java/org/hibernate/search/backend/Workspace.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
Log:
HSEARCH-326 Drop support for IndexReader usage to update indexes. This also removes more unneeded Locks.
Modified: search/trunk/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -8,9 +8,7 @@
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.store.Directory;
import org.slf4j.Logger;
import org.hibernate.annotations.common.AssertionFailure;
@@ -24,13 +22,8 @@
/**
* Lucene workspace for a DirectoryProvider.<p/>
- * <ul>
- * <li>Before using {@link #getIndexWriter} or {@link #getIndexReader} the lock must be acquired,
- * and resources must be closed before releasing the lock.</li>
- * <li>One cannot get an IndexWriter when an IndexReader has been acquired and not closed, and vice-versa.</li>
- * <li>The recommended approach is to execute all the modifications on the <code>IndexReader</code>, and after that on
- * the <code>IndexWriter</code></li>.
- * </ul>
+ * Before using {@link #getIndexWriter} the lock must be acquired,
+ * and resources must be closed before releasing the lock.
*
* @author Emmanuel Bernard
* @author Hardy Ferentschik
@@ -41,11 +34,13 @@
private static final Logger log = LoggerFactory.make();
private static final Analyzer SIMPLE_ANALYZER = new SimpleAnalyzer();
+ private static final IndexWriter.MaxFieldLength maxFieldLength =
+ new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
// invariant state:
private final SearchFactoryImplementor searchFactoryImplementor;
- private final DirectoryProvider directoryProvider;
+ private final DirectoryProvider<?> directoryProvider;
private final OptimizerStrategy optimizerStrategy;
private final ReentrantLock lock;
private final Set<Class<?>> entitiesInDirectory;
@@ -54,11 +49,6 @@
// variable state:
/**
- * Current open IndexReader, or null when closed. Guarded by synchronization.
- */
- private IndexReader reader;
-
- /**
* Current open IndexWriter, or null when closed. Guarded by synchronization.
*/
private IndexWriter writer;
@@ -88,13 +78,16 @@
/**
* If optimization has not been forced give a change to configured OptimizerStrategy
* to optimize the index.
- * @throws AssertionFailure if the lock is not owned or if an IndexReader is open.
+ * To enter the optimization phase you need to acquire the lock first.
+ * @throws AssertionFailure if the lock is not owned.
*/
public void optimizerPhase() {
assertOwnLock();
// used getAndSet(0) because Workspace is going to be reused by next transaction.
- optimizerStrategy.addTransaction( operations.getAndSet( 0L ) );
- optimizerStrategy.optimize( this );
+ synchronized (optimizerStrategy) {
+ optimizerStrategy.addTransaction( operations.getAndSet( 0L ) );
+ optimizerStrategy.optimize( this );
+ }
}
/**
@@ -105,79 +98,25 @@
* @see SearchFactory#optimize(Class)
*/
public void optimize() {
- assertOwnLock(); // the DP is not affected, but needs to ensure the optimizerStrategy is accesses in threadsafe way
- optimizerStrategy.optimizationForced();
- }
-
- /**
- * Gets an IndexReader to alter the index, opening one if needed.
- * The caller needs to own the lock relevant to this DirectoryProvider.
- * @throws AssertionFailure if an IndexWriter is open or if the lock is not owned.
- * @return a new IndexReader or one already open.
- * @see #lock()
- */
- public synchronized IndexReader getIndexReader() {
- assertOwnLock();
- // one cannot access a reader for update while a writer is in use
- if ( writer != null )
- throw new AssertionFailure( "Tries to read for update an index while a writer is in use." );
- if ( reader != null )
- return reader;
- Directory directory = directoryProvider.getDirectory();
- try {
- reader = IndexReader.open( directory, false );
- log.trace( "IndexReader opened" );
+ // Needs to ensure the optimizerStrategy is accessed in threadsafe way
+ synchronized (optimizerStrategy) {
+ optimizerStrategy.optimizationForced();
}
- catch ( IOException e ) {
- reader = null;
- throw new SearchException( "Unable to open IndexReader on directory " + directory, e );
- }
- return reader;
}
/**
- * Closes a previously opened IndexReader.
- * @throws SearchException on IOException during Lucene close operation.
- * @throws AssertionFailure if the lock is not owned or if there is no IndexReader to close.
- * @see #getIndexReader()
- */
- public synchronized void closeIndexReader() {
- assertOwnLock();
- IndexReader toClose = reader;
- reader = null;
- if ( toClose != null ) {
- try {
- toClose.close();
- log.trace( "IndexReader closed" );
- }
- catch ( IOException e ) {
- throw new SearchException( "Exception while closing IndexReader", e );
- }
- }
- else {
- throw new AssertionFailure( "No IndexReader open to close." );
- }
- }
-
- /**
* Gets the IndexWriter, opening one if needed.
* @param batchmode when true the indexWriter settings for batch mode will be applied.
* Ignored if IndexWriter is open already.
- * @throws AssertionFailure if an IndexReader is open or the lock is not owned.
+ * @throws AssertionFailure if the lock is not owned.
* @throws SearchException on a IOException during index opening.
* @return a new IndexWriter or one already open.
*/
public synchronized IndexWriter getIndexWriter(boolean batchmode) {
- assertOwnLock();
- // one has to close a reader for update before a writer is accessed
- if ( reader != null )
- throw new AssertionFailure( "Tries to open an IndexWriter while an IndexReader is open in update mode." );
if ( writer != null )
return writer;
try {
- // don't care about the Analyzer as it will be selected during usage of IndexWriter.
- IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
- writer = new IndexWriter( directoryProvider.getDirectory(), SIMPLE_ANALYZER, false, fieldLength ); // has been created at init time
+ writer = new IndexWriter( directoryProvider.getDirectory(), SIMPLE_ANALYZER, false, maxFieldLength ); // has been created at init time
indexingParams.applyToWriter( writer, batchmode );
log.trace( "IndexWriter opened" );
}
@@ -189,13 +128,12 @@
}
/**
- * Commits changes to a previously opened index writer.
+ * Commits changes to a previously opened IndexWriter.
*
* @throws SearchException on IOException during Lucene close operation.
- * @throws AssertionFailure if there is no IndexWriter to close, or if the lock is not owned.
+ * @throws AssertionFailure if there is no IndexWriter to close.
*/
public synchronized void commitIndexWriter() {
- assertOwnLock();
if ( writer != null ) {
try {
writer.commit();
@@ -213,10 +151,9 @@
/**
* Closes a previously opened IndexWriter.
* @throws SearchException on IOException during Lucene close operation.
- * @throws AssertionFailure if there is no IndexWriter to close, or if the lock is not owned.
+ * @throws AssertionFailure if there is no IndexWriter to close.
*/
public synchronized void closeIndexWriter() {
- assertOwnLock();
IndexWriter toClose = writer;
writer = null;
if ( toClose != null ) {
@@ -252,34 +189,19 @@
/**
* Acquires a lock on the DirectoryProvider backing this Workspace;
- * this is required to use getIndexWriter(boolean), closeIndexWriter(),
- * getIndexReader(), closeIndexReader().
- * @see #getIndexWriter(boolean)
- * @see #closeIndexWriter()
- * @see #getIndexReader()
- * @see #closeIndexReader()
+ * this is required to use optimizerPhase()
+ * @see #optimizerPhase()
*/
public void lock() {
lock.lock();
}
/**
- * Releases the lock obtained by calling lock()
- * @throws AssertionFailure when unlocking without having closed IndexWriter or IndexReader.
+ * Releases the lock obtained by calling lock(). The caller must own the lock.
* @see #lock()
*/
- public synchronized void unlock() {
- try {
- if ( this.reader != null ) {
- throw new AssertionFailure( "Unlocking Workspace without having closed the IndexReader" );
- }
- if ( this.writer != null ) {
- throw new AssertionFailure( "Unlocking Workspace without having closed the IndexWriter" );
- }
- }
- finally {
- lock.unlock();
- }
+ public void unlock() {
+ lock.unlock();
}
private void assertOwnLock() {
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -4,17 +4,20 @@
import java.util.List;
import java.util.concurrent.ExecutorService;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.slf4j.Logger;
-import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
import org.hibernate.search.backend.impl.lucene.works.LuceneWorkVisitor;
import org.hibernate.search.util.LoggerFactory;
/**
+ * A Runnable containing a unit of changes to be applied to a specific index.
+ * After creation, use addWork(LuceneWork) to fill the changes queue and then
+ * run it to apply all changes. After run() this object should be discarded.
+ * @see Runnable
+ * @see #addWork(LuceneWork)
* @author Sanne Grinovero
*/
class PerDPQueueProcessor implements Runnable {
@@ -24,79 +27,36 @@
private final LuceneWorkVisitor worker;
private final ExecutorService executor;
private final List<LuceneWork> workOnWriter = new ArrayList<LuceneWork>();
- private final List<LuceneWork> workOnReader= new ArrayList<LuceneWork>();
- // if any work passed to addWork needs one, set corresponding flag to true:
+ // if any work needs batchmode, set corresponding flag to true:
private boolean batchmode = false;
- private boolean needsWriter = false;
- private boolean preferReader = false;
+ /**
+ * @param resources All resources for the given DirectoryProvider are collected
+ * from this wrapping object.
+ */
public PerDPQueueProcessor(PerDPResources resources) {
this.worker = resources.getVisitor();
this.workspace = resources.getWorkspace();
this.executor = resources.getExecutor();
}
+ /**
+ * adds a LuceneWork to the internal queue. Can't remove them.
+ * @param work
+ */
public void addWork(LuceneWork work) {
if ( work.isBatch() ) {
batchmode = true;
log.debug( "Batch mode enabled" );
}
- IndexInteractionType type = work.getWorkDelegate( worker ).getIndexInteractionType();
- switch ( type ) {
- case PREFER_INDEXREADER :
- preferReader = true;
- workOnReader.add( work );
- break;
- case NEEDS_INDEXWRITER :
- needsWriter = true;
- //fall through:
- case PREFER_INDEXWRITER :
- workOnWriter.add( work );
- break;
- default :
- throw new AssertionFailure( "Uncovered switch case for type " + type );
- }
+ workOnWriter.add( work );
}
- public void run() {
- // skip "resource optimization mode" when in batch to have all tasks use preferred (optimal) mode.
- if ( ! batchmode ) {
- // see if we can skip using some resource
- if ( ! needsWriter ) { // no specific need:
- if ( preferReader ) {
- useReaderOnly();
- }
- else {
- useWriterOnly();
- }
- }
- else {
- useWriterOnly();
- }
- if ( ! (workOnWriter.isEmpty() || workOnReader.isEmpty() ) ) {
- throw new AssertionFailure(
- "During non-batch mode performWorks tries to use both IndexWriter and IndexReader." );
- }
- }
- // apply changes to index:
- log.trace( "Locking Workspace (or waiting to...)" );
- workspace.lock();
- log.trace( "Workspace lock aquired." );
- try {
- performReaderWorks();
- performWriterWorks();
- }
- finally {
- workspace.unlock();
- log.trace( "Unlocking Workspace" );
- }
- }
-
/**
* Do all workOnWriter on an IndexWriter.
*/
- private void performWriterWorks() {
+ public void run() {
if ( workOnWriter.isEmpty() ) {
return;
}
@@ -107,56 +67,37 @@
lw.getWorkDelegate( worker ).performWork( lw, indexWriter );
}
workspace.commitIndexWriter();
- //TODO next line is assuming the OptimizerStrategy will need an IndexWriter;
- // would be nicer to have the strategy put an OptimizeWork on the queue,
- // or just return "yes please" (true) to some method?
- //FIXME will not have a chance to trigger when no writer activity is done.
- // this is currently ok, until we enable mod.counts for deletions too.
- workspace.optimizerPhase();
+ //TODO skip this when indexing in batches:
+ performOptimizations();
}
finally {
workspace.closeIndexWriter();
}
}
-
- /**
- * Do all workOnReader on an IndexReader.
- */
- private void performReaderWorks() {
- if ( workOnReader.isEmpty() ) {
- return;
- }
- log.debug( "Opening an IndexReader for update" );
- IndexReader indexReader = workspace.getIndexReader();
+
+ private void performOptimizations() {
+ log.trace( "Locking Workspace (or waiting to...)" );
+ workspace.lock();
try {
- for (LuceneWork lw : workOnReader) {
- lw.getWorkDelegate( worker ).performWork( lw, indexReader );
- }
+ log.trace( "Workspace lock acquired." );
+ //TODO next line is assuming the OptimizerStrategy will need an IndexWriter;
+ // would be nicer to have the strategy put an OptimizeWork on the queue,
+ // or just return "yes please" (true) to some method?
+ //FIXME will not have a chance to trigger when no "add" activity is done.
+ // this is correct until we enable modification counts for deletions too.
+ workspace.optimizerPhase();
}
finally {
- workspace.closeIndexReader();
+ workspace.unlock();
+ log.trace( "Unlocked Workspace" );
}
}
/**
- * forces all work to be done using only an IndexReader
+ * Each PerDPQueueProcessor is owned by an Executor,
+ * which contains the threads allowed to execute this.
+ * @return the Executor which should run this Runnable.
*/
- private void useReaderOnly() {
- log.debug( "Skipping usage of an IndexWriter for updates" );
- workOnReader.addAll( workOnWriter );
- workOnWriter.clear();
- }
-
- /**
- * forces all work to be done using only an IndexWriter
- */
- private void useWriterOnly() {
- log.debug( "Skipping usage of an IndexReader for updates" );
- //position 0 needed to maintain correct ordering of Work: delete operations first.
- workOnWriter.addAll( 0, workOnReader );
- workOnReader.clear();
- }
-
public ExecutorService getOwningExecutor() {
return executor;
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/AddWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -4,7 +4,6 @@
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.Similarity;
import org.slf4j.Logger;
@@ -13,7 +12,6 @@
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
import org.hibernate.search.util.ScopedAnalyzer;
@@ -38,13 +36,10 @@
this.workspace = workspace;
}
- public IndexInteractionType getIndexInteractionType() {
- return IndexInteractionType.NEEDS_INDEXWRITER;
- }
-
public void performWork(LuceneWork work, IndexWriter writer) {
+ final Class<?> entityType = work.getEntityClass();
@SuppressWarnings("unchecked")
- DocumentBuilderIndexedEntity documentBuilder = workspace.getDocumentBuilder( work.getEntityClass() );
+ DocumentBuilderIndexedEntity documentBuilder = workspace.getDocumentBuilder( entityType );
Map<String, String> fieldToAnalyzerMap = ( ( AddLuceneWork ) work ).getFieldToAnalyzerMap();
ScopedAnalyzer analyzer = ( ScopedAnalyzer ) documentBuilder.getAnalyzer();
analyzer = updateAnalyzerMappings( analyzer, fieldToAnalyzerMap, workspace );
@@ -52,12 +47,12 @@
if ( log.isTraceEnabled() ) {
log.trace(
"add to Lucene index: {}#{}:{}",
- new Object[] { work.getEntityClass(), work.getId(), work.getDocument() }
+ new Object[] { entityType, work.getId(), work.getDocument() }
);
}
try {
//TODO the next two operations should be atomic to enable concurrent usage of IndexWriter
- // make a wrapping Similarity based on ThreadLocals? or having it autoselect implementation basing on entity?
+ // make a wrapping Similarity based on ThreadLocals? or have it autoselect the implementation based on the entity?
writer.setSimilarity( similarity );
writer.addDocument( work.getDocument(), analyzer );
workspace.incrementModificationCounter( 1 );
@@ -65,7 +60,7 @@
catch ( IOException e ) {
throw new SearchException(
"Unable to add to Lucene index: "
- + work.getEntityClass() + "#" + work.getId(), e
+ + entityType + "#" + work.getId(), e
);
}
}
@@ -100,7 +95,4 @@
return analyzerClone;
}
- public void performWork(LuceneWork work, IndexReader reader) {
- throw new UnsupportedOperationException();
- }
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteExtWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -2,14 +2,12 @@
import java.io.Serializable;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.search.SearchException;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.util.LoggerFactory;
import org.slf4j.Logger;
@@ -25,8 +23,8 @@
*/
public class DeleteExtWorkDelegate extends DeleteWorkDelegate {
- private final Class managedType;
- private final DocumentBuilderIndexedEntity builder;
+ private final Class<?> managedType;
+ private final DocumentBuilderIndexedEntity<?> builder;
private final Logger log = LoggerFactory.make();
DeleteExtWorkDelegate(Workspace workspace) {
@@ -39,13 +37,6 @@
}
@Override
- public IndexInteractionType getIndexInteractionType() {
- // no particular reason to prefer Reader, just it's possibly more tested
- // as using the writer is an option of latest Lucene version only (2.4).
- return IndexInteractionType.PREFER_INDEXREADER;
- }
-
- @Override
public void performWork(LuceneWork work, IndexWriter writer) {
checkType( work );
Serializable id = work.getId();
@@ -60,21 +51,6 @@
}
}
- @Override
- public void performWork(LuceneWork work, IndexReader reader) {
- checkType( work );
- Serializable id = work.getId();
- log.trace( "Removing {}#{} by id using an IndexReader.", managedType, id );
- Term idTerm = builder.getTerm( id );
- try {
- reader.deleteDocuments( idTerm );
- }
- catch ( Exception e ) {
- String message = "Unable to remove " + managedType + "#" + id + " from index.";
- throw new SearchException( message, e );
- }
- }
-
private void checkType(final LuceneWork work) {
if ( work.getEntityClass() != managedType ) {
throw new AssertionFailure( "Unexpected type" );
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/DeleteWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -1,11 +1,9 @@
package org.hibernate.search.backend.impl.lucene.works;
-import java.io.IOException;
+import java.io.Serializable;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermDocs;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
@@ -14,7 +12,6 @@
import org.hibernate.search.SearchException;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
import org.hibernate.search.engine.DocumentBuilderIndexedEntity;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.util.LoggerFactory;
@@ -38,18 +35,15 @@
this.workspace = workspace;
}
- public IndexInteractionType getIndexInteractionType() {
- return IndexInteractionType.PREFER_INDEXWRITER;
- }
-
public void performWork(LuceneWork work, IndexWriter writer) {
final Class<?> entityType = work.getEntityClass();
- log.trace( "Removing {}#{} by query.", entityType, work.getId() );
+ final Serializable id = work.getId();
+ log.trace( "Removing {}#{} by query.", entityType, id );
DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder( entityType );
BooleanQuery entityDeletionQuery = new BooleanQuery();
- TermQuery idQueryTerm = new TermQuery( builder.getTerm( work.getId() ) );
+ TermQuery idQueryTerm = new TermQuery( builder.getTerm( id ) );
entityDeletionQuery.add( idQueryTerm, BooleanClause.Occur.MUST );
Term classNameQueryTerm = new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() );
@@ -60,59 +54,9 @@
writer.deleteDocuments( entityDeletionQuery );
}
catch ( Exception e ) {
- String message = "Unable to remove " + entityType + "#" + work.getId() + " from index.";
+ String message = "Unable to remove " + entityType + "#" + id + " from index.";
throw new SearchException( message, e );
}
}
- /*
- * This method is obsolete and was used pre Lucene 2.4. Now we are using IndexWriter.deleteDocuments(Query) to
- * delete index documents.
- *
- * This method might be deleted at some stage. (hardy)
- */
- public void performWork(LuceneWork work, IndexReader reader) {
- /**
- * even with Lucene 2.1, use of indexWriter to delete is not an option
- * We can only delete by term, and the index doesn't have a term that
- * uniquely identify the entry. See logic below
- */
- final Class<?> entityType = work.getEntityClass();
- log.trace( "Removing {}#{} from Lucene index.", entityType, work.getId() );
- DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder( entityType );
- Term term = builder.getTerm( work.getId() );
- TermDocs termDocs = null;
- try {
- //TODO is there a faster way?
- //TODO include TermDocs into the workspace?
- termDocs = reader.termDocs( term );
- String entityName = entityType.getName();
- while ( termDocs.next() ) {
- int docIndex = termDocs.doc();
- if ( entityName.equals( reader.document( docIndex ).get( DocumentBuilder.CLASS_FIELDNAME ) ) ) {
- //remove only the one of the right class
- //loop all to remove all the matches (defensive code)
- reader.deleteDocument( docIndex );
- }
- }
- //TODO shouldn't this use workspace.incrementModificationCounter( 1 ) ?
- }
- catch ( Exception e ) {
- throw new SearchException(
- "Unable to remove from Lucene index: "
- + entityType + "#" + work.getId(), e
- );
- }
- finally {
- if ( termDocs != null ) {
- try {
- termDocs.close();
- }
- catch ( IOException e ) {
- log.warn( "Unable to close termDocs properly", e );
- }
- }
- }
- }
-
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/LuceneWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -1,9 +1,7 @@
package org.hibernate.search.backend.impl.lucene.works;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.hibernate.search.backend.LuceneWork;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
/**
* @author Sanne Grinovero
@@ -11,12 +9,6 @@
public interface LuceneWorkDelegate {
/**
- * @return the IndexInteractionType needed to accomplish this work (reader or writer)
- * or have a chance to express any preference for performance optimizations.
- */
- IndexInteractionType getIndexInteractionType();
-
- /**
* Will perform work on an IndexWriter.
* @param work the LuceneWork to apply to the IndexWriter.
* @param writer the IndexWriter to use.
@@ -24,12 +16,4 @@
*/
void performWork(LuceneWork work, IndexWriter writer);
- /**
- * Will perform this work on an IndexReader.
- * @param work the LuceneWork to apply to the IndexReader.
- * @param reader the IndexReader to use.
- * @throws UnsupportedOperationException when the work is not compatible with an IndexReader.
- */
- void performWork(LuceneWork work, IndexReader reader);
-
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/OptimizeWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -2,14 +2,12 @@
import java.io.IOException;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.slf4j.Logger;
import org.hibernate.search.SearchException;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.Workspace;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
import org.hibernate.search.util.LoggerFactory;
/**
@@ -32,23 +30,16 @@
this.workspace = workspace;
}
- public IndexInteractionType getIndexInteractionType() {
- return IndexInteractionType.NEEDS_INDEXWRITER;
- }
-
public void performWork(LuceneWork work, IndexWriter writer) {
- log.trace( "optimize Lucene index: {}", work.getEntityClass() );
+ final Class<?> entityType = work.getEntityClass();
+ log.trace( "optimize Lucene index: {}", entityType );
try {
writer.optimize();
workspace.optimize();
}
catch ( IOException e ) {
- throw new SearchException( "Unable to optimize Lucene index: " + work.getEntityClass(), e );
+ throw new SearchException( "Unable to optimize Lucene index: " + entityType, e );
}
}
- public void performWork(LuceneWork work, IndexReader reader) {
- throw new UnsupportedOperationException();
- }
-
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java 2008-12-17 14:41:24 UTC (rev 15703)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/works/PurgeAllWorkDelegate.java 2008-12-18 13:24:48 UTC (rev 15704)
@@ -1,13 +1,11 @@
package org.hibernate.search.backend.impl.lucene.works;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.slf4j.Logger;
import org.hibernate.search.SearchException;
import org.hibernate.search.backend.LuceneWork;
-import org.hibernate.search.backend.impl.lucene.IndexInteractionType;
import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.util.LoggerFactory;
@@ -27,30 +25,16 @@
PurgeAllWorkDelegate() {
}
- public IndexInteractionType getIndexInteractionType() {
- return IndexInteractionType.PREFER_INDEXREADER;
- }
-
public void performWork(LuceneWork work, IndexWriter writer) {
- log.trace( "purgeAll Lucene index using IndexWriter for type: {}", work.getEntityClass() );
+ final Class<?> entityType = work.getEntityClass();
+ log.trace( "purgeAll Lucene index using IndexWriter for type: {}", entityType );
try {
- Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, work.getEntityClass().getName() );
+ Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, entityType.getName() );
writer.deleteDocuments( term );
}
catch (Exception e) {
- throw new SearchException( "Unable to purge all from Lucene index: " + work.getEntityClass(), e );
+ throw new SearchException( "Unable to purge all from Lucene index: " + entityType, e );
}
}
- public void performWork(LuceneWork work, IndexReader reader) {
- log.trace( "purgeAll Lucene index using IndexReader for type: {}", work.getEntityClass() );
- try {
- Term term = new Term( DocumentBuilder.CLASS_FIELDNAME, work.getEntityClass().getName() );
- reader.deleteDocuments( term );
- }
- catch (Exception e) {
- throw new SearchException( "Unable to purge all from Lucene index: " + work.getEntityClass(), e );
- }
- }
-
}
17 years, 4 months