Hibernate SVN: r15547 - in search/trunk/src: java/org/hibernate/search/backend/impl/lucene and 7 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-11-11 07:57:47 -0500 (Tue, 11 Nov 2008)
New Revision: 15547
Modified:
search/trunk/src/java/org/hibernate/search/backend/Workspace.java
search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
search/trunk/src/java/org/hibernate/search/store/DirectoryProviderHelper.java
search/trunk/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
search/trunk/src/test/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java
search/trunk/src/test/org/hibernate/search/test/filter/BestDriversFilter.java
search/trunk/src/test/org/hibernate/search/test/filter/ExcludeAllFilter.java
search/trunk/src/test/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java
search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
search/trunk/src/test/org/hibernate/search/test/reader/performance/IndexFillRunnable.java
search/trunk/src/test/org/hibernate/search/test/reader/performance/ReaderPerformance.java
search/trunk/src/test/org/hibernate/search/test/util/AnalyzerUtils.java
Log:
HSEARCH-283
* Updated deprecated field constants
* changed IndexWriter constructor
Modified: search/trunk/src/java/org/hibernate/search/backend/Workspace.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/java/org/hibernate/search/backend/Workspace.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -25,11 +25,11 @@
/**
* Lucene workspace for a DirectoryProvider.<p/>
* <ul>
- * <li>Before using getIndexWriter or getIndexReader the lock must be acquired, and resources must be closed
- * before releasing the lock.</li>
+ * <li>Before using {@link #getIndexWriter} or {@link #getIndexReader} the lock must be acquired,
+ * and resources must be closed before releasing the lock.</li>
* <li>One cannot get an IndexWriter when an IndexReader has been acquired and not closed, and vice-versa.</li>
- * <li>The recommended approach is to execute all the modifications on the IndexReader, and after that on
- * the IndexWriter</li>
+ * <li>The recommended approach is to execute all the modifications on the <code>IndexReader</code>, and after that on
+ * the <code>IndexWriter</code>.</li>
* </ul>
*
* @author Emmanuel Bernard
@@ -173,8 +173,8 @@
return writer;
try {
// don't care about the Analyzer as it will be selected during usage of IndexWriter.
- //FIXME use the non deprecated constructor => requires to call #Commit()
- writer = new IndexWriter( directoryProvider.getDirectory(), SIMPLE_ANALYZER, false ); // has been created at init time
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ writer = new IndexWriter( directoryProvider.getDirectory(), SIMPLE_ANALYZER, false, fieldLength ); // has been created at init time
indexingParams.applyToWriter( writer, batchmode );
log.trace( "IndexWriter opened" );
}
@@ -186,6 +186,30 @@
}
/**
+ * Commits changes to a previously opened index writer.
+ *
+ * @throws SearchException on IOException during Lucene close operation.
+ * @throws AssertionFailure if there is no IndexWriter to close, or if the lock is not owned.
+ */
+ public synchronized void commitIndexWriter() {
+ assertOwnLock();
+ if ( writer != null ) {
+ try {
+ writer.commit();
+ log.trace( "Index changes commited." );
+ }
+ catch ( IOException e ) {
+ throw new SearchException( "Exception while commiting index changes", e );
+ }
+ }
+ else {
+ throw new AssertionFailure( "No open IndexWriter to commit changes." );
+ }
+ }
+
+
+
+ /**
* Closes a previously opened IndexWriter.
* @throws SearchException on IOException during Lucene close operation.
* @throws AssertionFailure if there is no IndexWriter to close, or if the lock is not owned.
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/lucene/PerDPQueueProcessor.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -103,6 +103,7 @@
for (LuceneWork lw : workOnWriter) {
lw.getWorkDelegate( worker ).performWork( lw, indexWriter );
}
+ workspace.commitIndexWriter();
//TODO next line is assuming the OptimizerStrategy will need an IndexWriter;
// would be nicer to have the strategy put an OptimizeWork on the queue,
// or just return "yes please" (true) to some method?
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentBuilder.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -583,11 +583,11 @@
case NO:
return Field.Index.NO;
case NO_NORMS:
- return Field.Index.NO_NORMS;
+ return Field.Index.NOT_ANALYZED_NO_NORMS;
case TOKENIZED:
- return Field.Index.TOKENIZED;
+ return Field.Index.ANALYZED;
case UN_TOKENIZED:
- return Field.Index.UN_TOKENIZED;
+ return Field.Index.NOT_ANALYZED;
default:
throw new AssertionFailure( "Unexpected Index: " + index );
}
@@ -750,10 +750,10 @@
}
{
Field classField =
- new Field( CLASS_FIELDNAME, entityType.getName(), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.NO );
+ new Field( CLASS_FIELDNAME, entityType.getName(), Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.NO );
doc.add( classField );
LuceneOptions luceneOptions = new LuceneOptionsImpl( Field.Store.YES,
- Field.Index.UN_TOKENIZED, Field.TermVector.NO, idBoost );
+ Field.Index.NOT_ANALYZED, Field.TermVector.NO, idBoost );
idBridge.set( idKeywordName, id, doc, luceneOptions );
}
buildDocumentFields( instance, doc, rootPropertiesMetadata );
Modified: search/trunk/src/java/org/hibernate/search/store/DirectoryProviderHelper.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/DirectoryProviderHelper.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/java/org/hibernate/search/store/DirectoryProviderHelper.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -77,7 +77,8 @@
FSDirectory fsDirectory = FSDirectory.getDirectory( indexDir );
if ( ! IndexReader.indexExists( fsDirectory ) ) {
log.debug( "Initialize index: '{}'", indexDir.getAbsolutePath() );
- IndexWriter iw = new IndexWriter( fsDirectory, new StandardAnalyzer(), true );
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( fsDirectory, new StandardAnalyzer(), true, fieldLength );
iw.close();
}
return fsDirectory;
Modified: search/trunk/src/java/org/hibernate/search/store/RAMDirectoryProvider.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/java/org/hibernate/search/store/RAMDirectoryProvider.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -28,7 +28,8 @@
public void start() {
directory = new RAMDirectory();
try {
- IndexWriter iw = new IndexWriter( directory, new StandardAnalyzer(), true );
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( directory, new StandardAnalyzer(), true, fieldLength );
iw.close();
}
catch (IOException e) {
Modified: search/trunk/src/test/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/analyzer/AbstractTestAnalyzer.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -23,12 +23,17 @@
private class InternalTokenStream extends TokenStream {
private int position;
- public Token next() throws IOException {
- if ( position >= getTokens().length) {
+ public Token next(final Token reusableToken) throws IOException {
+ assert reusableToken != null;
+ if ( position >= getTokens().length ) {
return null;
}
else {
- return new Token( getTokens()[position++], 0, 0 );
+ reusableToken.clear();
+ reusableToken.setTermBuffer( getTokens()[position++] );
+ reusableToken.setStartOffset( 0 );
+ reusableToken.setEndOffset( 0 );
+ return reusableToken;
}
}
}
Modified: search/trunk/src/test/org/hibernate/search/test/filter/BestDriversFilter.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/filter/BestDriversFilter.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/filter/BestDriversFilter.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -1,22 +1,23 @@
//$Id$
package org.hibernate.search.test.filter;
-import java.util.BitSet;
import java.io.IOException;
-import org.apache.lucene.search.Filter;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.util.OpenBitSet;
/**
* @author Emmanuel Bernard
*/
public class BestDriversFilter extends Filter {
- public BitSet bits(IndexReader reader) throws IOException {
- BitSet bitSet = new BitSet( reader.maxDoc() );
- TermDocs termDocs = reader.termDocs( new Term("score", "5") );
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ OpenBitSet bitSet = new OpenBitSet( reader.maxDoc() );
+ TermDocs termDocs = reader.termDocs( new Term( "score", "5" ) );
while ( termDocs.next() ) {
bitSet.set( termDocs.doc() );
}
Modified: search/trunk/src/test/org/hibernate/search/test/filter/ExcludeAllFilter.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/filter/ExcludeAllFilter.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/filter/ExcludeAllFilter.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -7,6 +7,7 @@
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.index.IndexReader;
+
import org.hibernate.search.filter.EmptyDocIdBitSet;
/**
@@ -14,13 +15,15 @@
*/
@SuppressWarnings("serial")
public class ExcludeAllFilter extends Filter {
-
- //ugly but useful for test purposes
+
+ // ugly but useful for test purposes
private static volatile boolean done = false;
@Override
public BitSet bits(IndexReader reader) throws IOException {
- if ( done ) throw new IllegalStateException( "Called twice" );
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
BitSet bitSet = new BitSet( reader.maxDoc() );
done = true;
return bitSet;
@@ -28,9 +31,10 @@
@Override
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
- if ( done ) throw new IllegalStateException( "Called twice" );
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
done = true;
return EmptyDocIdBitSet.instance;
}
-
}
Modified: search/trunk/src/test/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/filter/InstanceBasedExcludeAllFilter.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -5,17 +5,32 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.DocIdSet;
+import org.hibernate.search.filter.EmptyDocIdBitSet;
+
/**
* @author Emmanuel Bernard
*/
public class InstanceBasedExcludeAllFilter extends Filter {
private volatile boolean done = false;
+ @Override
public BitSet bits(IndexReader reader) throws IOException {
- if (done) throw new IllegalStateException("Called twice");
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
BitSet bitSet = new BitSet( reader.maxDoc() );
done = true;
return bitSet;
}
+
+ @Override
+ public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ if ( done ) {
+ throw new IllegalStateException( "Called twice" );
+ }
+ done = true;
+ return EmptyDocIdBitSet.instance;
+ }
}
Modified: search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/jms/master/JMSMasterTest.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -54,11 +54,11 @@
s.close();
//create the work queue to send
Document doc = new Document();
- Field field = new Field( DocumentBuilder.CLASS_FIELDNAME, ts.getClass().getName(), Field.Store.YES, Field.Index.UN_TOKENIZED );
+ Field field = new Field( DocumentBuilder.CLASS_FIELDNAME, ts.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
doc.add( field );
- field = new Field("id", "1", Field.Store.YES, Field.Index.UN_TOKENIZED );
+ field = new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED );
doc.add( field );
- field = new Field("logo", ts.getLogo(), Field.Store.NO, Field.Index.TOKENIZED );
+ field = new Field("logo", ts.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
doc.add( field );
LuceneWork luceneWork = new AddLuceneWork(ts.getId(), String.valueOf( ts.getId() ), ts.getClass(), doc );
List<LuceneWork> queue = new ArrayList<LuceneWork>();
Modified: search/trunk/src/test/org/hibernate/search/test/reader/performance/IndexFillRunnable.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/reader/performance/IndexFillRunnable.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/reader/performance/IndexFillRunnable.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -16,23 +16,29 @@
private volatile int jobSeed = 0;
private final IndexWriter iw;
-
+
public IndexFillRunnable(IndexWriter iw) {
super();
this.iw = iw;
}
public void run() {
- Field f1 = new Field("name", "Some One " + jobSeed++, Store.NO, Index.TOKENIZED );
- Field f2 = new Field("physicalDescription", " just more people sitting around and filling my index... ", Store.NO, Index.TOKENIZED );
+ Field f1 = new Field( "name", "Some One " + jobSeed++, Store.NO, Index.ANALYZED );
+ Field f2 = new Field(
+ "physicalDescription",
+ " just more people sitting around and filling my index... ",
+ Store.NO,
+ Index.ANALYZED
+ );
Document d = new Document();
d.add( f1 );
d.add( f2 );
try {
iw.addDocument( d );
- } catch (IOException e) {
+ }
+ catch ( IOException e ) {
e.printStackTrace();
}
}
-
+
}
Modified: search/trunk/src/test/org/hibernate/search/test/reader/performance/ReaderPerformance.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/reader/performance/ReaderPerformance.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/reader/performance/ReaderPerformance.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -55,7 +55,8 @@
private void buildBigIndex() throws InterruptedException, CorruptIndexException, LockObtainFailedException, IOException {
System.out.println( "Going to create fake index..." );
FSDirectory directory = FSDirectory.getDirectory(new File(getBaseIndexDir(), Detective.class.getCanonicalName()));
- IndexWriter iw = new IndexWriter( directory, new SimpleAnalyzer(), true );
+ IndexWriter.MaxFieldLength fieldLength = new IndexWriter.MaxFieldLength( IndexWriter.DEFAULT_MAX_FIELD_LENGTH );
+ IndexWriter iw = new IndexWriter( directory, new SimpleAnalyzer(), true, fieldLength );
IndexFillRunnable filler = new IndexFillRunnable( iw );
ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool( WORKER_THREADS );
for (int batch=0; batch<=5000000; batch++){
@@ -63,6 +64,7 @@
}
executor.shutdown();
executor.awaitTermination( 600, TimeUnit.SECONDS );
+ iw.commit();
iw.optimize();
iw.close();
System.out.println( "Index created." );
Modified: search/trunk/src/test/org/hibernate/search/test/util/AnalyzerUtils.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/util/AnalyzerUtils.java 2008-11-11 10:14:22 UTC (rev 15546)
+++ search/trunk/src/test/org/hibernate/search/test/util/AnalyzerUtils.java 2008-11-11 12:57:47 UTC (rev 15547)
@@ -26,23 +26,24 @@
public static Token[] tokensFromAnalysis(Analyzer analyzer, String field, String text) throws IOException {
TokenStream stream = analyzer.tokenStream( field, new StringReader( text ) );
List<Token> tokenList = new ArrayList<Token>();
+ Token reusableToken = new Token();
while ( true ) {
- Token token = stream.next();
+
+ Token token = stream.next( reusableToken );
if ( token == null ) {
break;
}
- tokenList.add( token );
+ tokenList.add( ( Token ) token.clone() );
}
- return ( Token[] ) tokenList.toArray( new Token[0] );
+ return tokenList.toArray( new Token[tokenList.size()] );
}
public static void displayTokens(Analyzer analyzer, String field, String text) throws IOException {
Token[] tokens = tokensFromAnalysis( analyzer, field, text );
- for ( int i = 0; i < tokens.length; i++ ) {
- Token token = tokens[i];
+ for ( Token token : tokens ) {
log.debug( "[" + getTermText( token ) + "] " );
}
}
@@ -52,9 +53,7 @@
int position = 0;
- for ( int i = 0; i < tokens.length; i++ ) {
- Token token = tokens[i];
-
+ for ( Token token : tokens ) {
int increment = token.getPositionIncrement();
if ( increment > 0 ) {
@@ -72,9 +71,7 @@
StringBuilder builder = new StringBuilder();
int position = 0;
- for ( int i = 0; i < tokens.length; i++ ) {
- Token token = tokens[i];
-
+ for ( Token token : tokens ) {
int increment = token.getPositionIncrement();
if ( increment > 0 ) {
15 years, 6 months
Hibernate SVN: r15546 - in search/trunk/src/java/org/hibernate/search: impl and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-11-11 05:14:22 -0500 (Tue, 11 Nov 2008)
New Revision: 15546
Modified:
search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
Log:
HSEARCH-281
Some more minor changes.
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-11 10:13:50 UTC (rev 15545)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-11 10:14:22 UTC (rev 15546)
@@ -130,9 +130,10 @@
workQueue.setSealedQueue( luceneQueue );
}
- private void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
+ private <T> void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
for ( int i = 0; i < initialSize; i++ ) {
- Work work = queue.get( i );
+ @SuppressWarnings("unchecked")
+ Work<T> work = queue.get( i );
if ( work != null ) {
if ( layer.isRightLayer( work.getType() ) ) {
queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-11 10:13:50 UTC (rev 15545)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-11 10:14:22 UTC (rev 15546)
@@ -123,9 +123,9 @@
// purge the subclasses
Set<Class<?>> subClasses = builder.getMappedSubclasses();
- Work subClassWork;
for ( Class clazz : subClasses ) {
- subClassWork = new Work( clazz, id, WorkType.PURGE_ALL );
+ @SuppressWarnings( "unchecked" )
+ Work subClassWork = new Work( clazz, id, WorkType.PURGE_ALL );
searchFactoryImplementor.getWorker().performWork( subClassWork, transactionContext );
}
}
15 years, 6 months
Hibernate SVN: r15545 - search/trunk/src/java/org/hibernate/search/backend.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-11-11 05:13:50 -0500 (Tue, 11 Nov 2008)
New Revision: 15545
Modified:
search/trunk/src/java/org/hibernate/search/backend/Worker.java
Log:
javadoc cleanup
Modified: search/trunk/src/java/org/hibernate/search/backend/Worker.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Worker.java 2008-11-10 22:32:17 UTC (rev 15544)
+++ search/trunk/src/java/org/hibernate/search/backend/Worker.java 2008-11-11 10:13:50 UTC (rev 15545)
@@ -4,10 +4,10 @@
import java.util.Properties;
import org.hibernate.search.engine.SearchFactoryImplementor;
-import org.hibernate.search.backend.TransactionContext;
/**
- * Perform work for a given session. This implementation has to be multi threaded
+ * Perform work for a given session. This implementation has to be multi-threaded.
+ *
* @author Emmanuel Bernard
*/
public interface Worker {
@@ -24,7 +24,9 @@
void close();
/**
- * flush any work queue
+ * Flush any work queue.
+ *
+ * @param transactionContext the current transaction (context).
*/
void flushWorks(TransactionContext transactionContext);
}
15 years, 6 months
Hibernate SVN: r15544 - core/trunk/testsuite.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-11-10 17:32:17 -0500 (Mon, 10 Nov 2008)
New Revision: 15544
Modified:
core/trunk/testsuite/pom.xml
Log:
correct H2 database artifact coord for testsuite profile
Modified: core/trunk/testsuite/pom.xml
===================================================================
--- core/trunk/testsuite/pom.xml 2008-11-10 22:31:51 UTC (rev 15543)
+++ core/trunk/testsuite/pom.xml 2008-11-10 22:32:17 UTC (rev 15544)
@@ -126,9 +126,9 @@
<id>h2</id>
<dependencies>
<dependency>
- <groupId>org.h2database</groupId>
- <artifactId>h2database</artifactId>
- <version>1.0.20061217</version>
+ <groupId>com.h2database</groupId>
+ <artifactId>h2</artifactId>
+ <version>1.1.103</version>
</dependency>
</dependencies>
<properties>
15 years, 6 months
Hibernate SVN: r15543 - core/branches/Branch_3_3/testsuite.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-11-10 17:31:51 -0500 (Mon, 10 Nov 2008)
New Revision: 15543
Modified:
core/branches/Branch_3_3/testsuite/pom.xml
Log:
correct H2 database artifact coord for testsuite profile
Modified: core/branches/Branch_3_3/testsuite/pom.xml
===================================================================
--- core/branches/Branch_3_3/testsuite/pom.xml 2008-11-10 20:22:44 UTC (rev 15542)
+++ core/branches/Branch_3_3/testsuite/pom.xml 2008-11-10 22:31:51 UTC (rev 15543)
@@ -126,9 +126,9 @@
<id>h2</id>
<dependencies>
<dependency>
- <groupId>org.h2database</groupId>
- <artifactId>h2database</artifactId>
- <version>1.0.20061217</version>
+ <groupId>com.h2database</groupId>
+ <artifactId>h2</artifactId>
+ <version>1.1.103</version>
</dependency>
</dependencies>
<properties>
15 years, 6 months
Hibernate SVN: r15542 - in search/trunk/src/java/org/hibernate/search: backend and 2 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-11-10 15:22:44 -0500 (Mon, 10 Nov 2008)
New Revision: 15542
Modified:
search/trunk/src/java/org/hibernate/search/FullTextSession.java
search/trunk/src/java/org/hibernate/search/backend/Work.java
search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
Log:
HSEARCH-281 - changed Work to Work<T>. Also changed the FullTextSession interface. Not sure if this is a good idea. Needs to be discussed.
Modified: search/trunk/src/java/org/hibernate/search/FullTextSession.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/FullTextSession.java 2008-11-10 20:14:05 UTC (rev 15541)
+++ search/trunk/src/java/org/hibernate/search/FullTextSession.java 2008-11-10 20:22:44 UTC (rev 15542)
@@ -6,15 +6,20 @@
import org.hibernate.classic.Session;
/**
- * Extends the Hibernate {@link Session} with Full text search and indexing capabilities
+ * Extends the Hibernate {@link Session} with fulltext search and indexing capabilities.
*
* @author Emmanuel Bernard
*/
public interface FullTextSession extends Session {
/**
- * Create a Query on top of a native Lucene Query returning the matching objects
+ * Create a fulltext query on top of a native Lucene query returning the matching objects
* of type <code>entities</code> and their respective subclasses.
- * If no entity is provided, no type filtering is done.
+ *
+ * @param luceneQuery The native Lucene query to be run against the Lucene index.
+ * @param entities List of classes for type filtering. The query result will only return entities of
+ * the specified types and their respective subtype. If no class is specified no type filtering will take place.
+ *
+ * @return A <code>FullTextQuery</code> wrapping around the native Lucene query.
*/
FullTextQuery createFullTextQuery(org.apache.lucene.search.Query luceneQuery, Class<?>... entities);
@@ -24,12 +29,13 @@
* will not affect the index at least until commit.
*
* @param entity The entity to index - must not be <code>null</code>.
+ *
* @throws IllegalArgumentException if entity is null or not an @Indexed entity
*/
- void index(Object entity);
+ <T> void index(T entity);
/**
- * return the SearchFactory
+ * @return the <code>SearchFactory</code> instance.
*/
SearchFactory getSearchFactory();
@@ -43,15 +49,16 @@
*
* @throws IllegalArgumentException if entityType is <code>null</code> or not an @Indexed entity type.
*/
- public void purge(Class<?> entityType, Serializable id);
+ public <T> void purge(Class<T> entityType, Serializable id);
/**
* Remove all entities from of particular class and all its subclasses from the index.
*
* @param entityType The class of the entities to remove.
+ *
* @throws IllegalArgumentException if entityType is <code>null</code> or not an @Indexed entity type.
*/
- public void purgeAll(Class<?> entityType);
+ public <T> void purgeAll(Class<T> entityType);
/**
* flush full text changes to the index
Modified: search/trunk/src/java/org/hibernate/search/backend/Work.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/Work.java 2008-11-10 20:14:05 UTC (rev 15541)
+++ search/trunk/src/java/org/hibernate/search/backend/Work.java 2008-11-10 20:22:44 UTC (rev 15542)
@@ -6,31 +6,31 @@
import org.hibernate.annotations.common.reflection.XMember;
/**
- * work unit. Only make sense inside the same session since it uses the scope principle
+ * A unit of work. It only makes sense inside the same session since it uses the scope principle.
*
* @author Emmanuel Bernard
*/
-public class Work {
- private final Object entity;
- private final Class entityClass;
+public class Work<T> {
+ private final T entity;
+ private final Class<T> entityClass;
private final Serializable id;
private final XMember idGetter;
private final WorkType type;
- public Work(Object entity, Serializable id, WorkType type) {
+ public Work(T entity, Serializable id, WorkType type) {
this( entity, null, id, null, type );
}
- public Work(Class entityType, Serializable id, WorkType type) {
+ public Work(Class<T> entityType, Serializable id, WorkType type) {
this( null, entityType, id, null, type );
}
- public Work(Object entity, XMember idGetter, WorkType type) {
+ public Work(T entity, XMember idGetter, WorkType type) {
this( entity, null, null, idGetter, type );
}
-
- private Work(Object entity, Class entityClass, Serializable id,
- XMember idGetter, WorkType type) {
+
+ private Work(T entity, Class<T> entityClass, Serializable id,
+ XMember idGetter, WorkType type) {
this.entity = entity;
this.entityClass = entityClass;
this.id = id;
@@ -38,11 +38,11 @@
this.type = type;
}
- public Class getEntityClass() {
+ public Class<T> getEntityClass() {
return entityClass;
}
- public Object getEntity() {
+ public T getEntity() {
return entity;
}
Modified: search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-10 20:14:05 UTC (rev 15541)
+++ search/trunk/src/java/org/hibernate/search/backend/impl/BatchedQueueingProcessor.java 2008-11-10 20:22:44 UTC (rev 15542)
@@ -53,7 +53,9 @@
//default to a simple asynchronous operation
int min = ConfigurationParseHelper.getIntValue( properties, Environment.WORKER_THREADPOOL_SIZE, 1 );
//no queue limit
- int queueSize = ConfigurationParseHelper.getIntValue( properties, Environment.WORKER_WORKQUEUE_SIZE, Integer.MAX_VALUE );
+ int queueSize = ConfigurationParseHelper.getIntValue(
+ properties, Environment.WORKER_WORKQUEUE_SIZE, Integer.MAX_VALUE
+ );
batchSize = ConfigurationParseHelper.getIntValue( properties, Environment.WORKER_BATCHSIZE, 0 );
@@ -67,7 +69,7 @@
*/
executorService = new ThreadPoolExecutor(
min, min, 60, TimeUnit.SECONDS,
- new LinkedBlockingQueue<Runnable>(queueSize),
+ new LinkedBlockingQueue<Runnable>( queueSize ),
new ThreadPoolExecutor.CallerRunsPolicy()
);
}
@@ -84,15 +86,15 @@
else {
try {
Class processorFactoryClass = ReflectHelper.classForName( backend, BatchedQueueingProcessor.class );
- backendQueueProcessorFactory = (BackendQueueProcessorFactory) processorFactoryClass.newInstance();
+ backendQueueProcessorFactory = ( BackendQueueProcessorFactory ) processorFactoryClass.newInstance();
}
- catch (ClassNotFoundException e) {
+ catch ( ClassNotFoundException e ) {
throw new SearchException( "Unable to find processor class: " + backend, e );
}
- catch (IllegalAccessException e) {
+ catch ( IllegalAccessException e ) {
throw new SearchException( "Unable to instanciate processor class: " + backend, e );
}
- catch (InstantiationException e) {
+ catch ( InstantiationException e ) {
throw new SearchException( "Unable to instanciate processor class: " + backend, e );
}
}
@@ -129,9 +131,9 @@
}
private void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
- for ( int i = 0 ; i < initialSize ; i++ ) {
+ for ( int i = 0; i < initialSize; i++ ) {
Work work = queue.get( i );
- if ( work != null) {
+ if ( work != null ) {
if ( layer.isRightLayer( work.getType() ) ) {
queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
addWorkToBuilderQueue( luceneQueue, work );
@@ -140,19 +142,22 @@
}
}
- private <T> void addWorkToBuilderQueue(List<LuceneWork> luceneQueue, Work work) {
- @SuppressWarnings( "unchecked" )
+ private <T> void addWorkToBuilderQueue(List<LuceneWork> luceneQueue, Work<T> work) {
+ @SuppressWarnings("unchecked")
Class<T> entityClass = work.getEntityClass() != null ?
- work.getEntityClass() :
- Hibernate.getClass( work.getEntity() );
+ work.getEntityClass() :
+ Hibernate.getClass( work.getEntity() );
DocumentBuilder<T> builder = searchFactoryImplementor.getDocumentBuilder( entityClass );
if ( builder == null ) {
//might be an entity contained in
builder = searchFactoryImplementor.getContainedInOnlyBuilder( entityClass );
}
- if ( builder == null ) return;
- //TODO remove casting when Work is Work<T>
- builder.addWorkToQueue(entityClass, (T) work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
+ if ( builder == null ) {
+ return;
+ }
+ builder.addWorkToQueue(
+ entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor
+ );
}
//TODO implements parallel batchWorkers (one per Directory)
@@ -177,20 +182,22 @@
try {
executorService.awaitTermination( Long.MAX_VALUE, TimeUnit.SECONDS );
}
- catch (InterruptedException e) {
+ catch ( InterruptedException e ) {
log.error( "Unable to properly shut down asynchronous indexing work", e );
}
}
}
private static enum Layer {
- FIRST,
+ FIRST,
SECOND;
public boolean isRightLayer(WorkType type) {
- if (this == FIRST && type != WorkType.COLLECTION) return true;
+ if ( this == FIRST && type != WorkType.COLLECTION ) {
+ return true;
+ }
return this == SECOND && type == WorkType.COLLECTION;
- }
+ }
}
}
Modified: search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-11-10 20:14:05 UTC (rev 15541)
+++ search/trunk/src/java/org/hibernate/search/event/FullTextIndexEventListener.java 2008-11-10 20:22:44 UTC (rev 15542)
@@ -26,7 +26,6 @@
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.backend.impl.EventSourceTransactionContext;
-import org.hibernate.search.engine.DocumentBuilder;
import org.hibernate.search.engine.SearchFactoryImplementor;
import org.hibernate.search.util.LoggerFactory;
@@ -103,8 +102,8 @@
}
}
- protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
- Work work = new Work( entity, id, workType );
+ protected <T> void processWork(T entity, Serializable id, WorkType workType, AbstractEvent event) {
+ Work<T> work = new Work<T>( entity, id, workType );
final EventSourceTransactionContext transactionContext = new EventSourceTransactionContext( event.getSession() );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
}
15 years, 6 months
Hibernate SVN: r15541 - in search/trunk: src/java/org/hibernate/search and 8 other directories.
by hibernate-commits@lists.jboss.org
Author: hardy.ferentschik
Date: 2008-11-10 15:14:05 -0500 (Mon, 10 Nov 2008)
New Revision: 15541
Added:
search/trunk/src/java/org/hibernate/search/query/QueryHits.java
Modified:
search/trunk/build.xml
search/trunk/src/java/org/hibernate/search/ProjectionConstants.java
search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java
search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java
search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java
search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
Log:
HSEARCH-283
Replaced Hits with TopDocs
Modified: search/trunk/build.xml
===================================================================
--- search/trunk/build.xml 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/build.xml 2008-11-10 20:14:05 UTC (rev 15541)
@@ -24,7 +24,6 @@
<property name="javac.source" value="1.5"/>
<property name="javac.target" value="1.5"/>
<property name="jdbc.dir" value="jdbc"/>
- <property name="common.dir" value="${basedir}"/>
<property name="ivy.dep.dir" value="${basedir}/build/lib"/>
@@ -37,7 +36,7 @@
<taskdef resource="fr/jayasoft/ivy/ant/antlib.xml"
uri="antlib:fr.jayasoft.ivy.ant" classpathref="ivy.lib.path"/>
- <import file="${common.dir}/common-build.xml"/>
+ <import file="common-build.xml"/>
<property name="build.testresources.dir" value="${build.dir}/testresources"/>
<property name="testresources.dir" value="${basedir}/src/test-resources"/>
@@ -308,7 +307,7 @@
</copy>
<copy todir="${dist.dir}" failonerror="false">
- <fileset dir="${common.dir}">
+ <fileset dir="${basedir}">
<include name="common-build.xml"/>
</fileset>
</copy>
Modified: search/trunk/src/java/org/hibernate/search/ProjectionConstants.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/ProjectionConstants.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/ProjectionConstants.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -11,30 +11,37 @@
* Represents the Hibernate Entity returned in a search.
*/
public String THIS = "__HSearch_This";
+
/**
* The Lucene document returned by a search.
*/
public String DOCUMENT = "__HSearch_Document";
+
/**
* The legacy document's score from a search.
*/
public String SCORE = "__HSearch_Score";
+
/**
* The boost value of the Lucene document.
+ *
* @deprecated always return 1
*/
public String BOOST = "__HSearch_Boost";
+
/**
* Object id property
*/
public String ID = "__HSearch_id";
+
/**
* Lucene Document id
* Experimental: If you use this feature, please speak up in the forum
- *
+ * <p/>
* Expert: Lucene document ids can change over time between two different IndexReader openings.
*/
public String DOCUMENT_ID = "__HSearch_DocumentId";
+
/**
* Lucene {@link org.apache.lucene.search.Explanation} object describing the score computation for
* the matching object/document
@@ -44,7 +51,7 @@
* and using fullTextQuery.explain(int)
*/
public String EXPLANATION = "__HSearch_Explanation";
-
+
/**
* Object class
*/
Modified: search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/engine/DocumentExtractor.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -5,27 +5,24 @@
import java.io.Serializable;
import org.apache.lucene.document.Document;
-import org.apache.lucene.search.Hits;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.hibernate.search.engine.EntityInfo;
+
import org.hibernate.search.ProjectionConstants;
+import org.hibernate.search.query.QueryHits;
/**
* @author Emmanuel Bernard
* @author John Griffin
+ * @author Hardy Ferentschik
*/
public class DocumentExtractor {
private final SearchFactoryImplementor searchFactoryImplementor;
private final String[] projection;
- private final IndexSearcher searcher;
- private final Query preparedQuery;
+ private final QueryHits queryHits;
- public DocumentExtractor(Query preparedQuery, IndexSearcher searcher, SearchFactoryImplementor searchFactoryImplementor, String... projection) {
+ public DocumentExtractor(QueryHits queryHits, SearchFactoryImplementor searchFactoryImplementor, String... projection) {
this.searchFactoryImplementor = searchFactoryImplementor;
this.projection = projection;
- this.searcher = searcher;
- this.preparedQuery = preparedQuery;
+ this.queryHits = queryHits;
}
private EntityInfo extract(Document document) {
@@ -35,20 +32,19 @@
if ( projection != null && projection.length > 0 ) {
projected = DocumentBuilder.getDocumentFields( searchFactoryImplementor, clazz, document, projection );
}
- EntityInfo entityInfo = new EntityInfo( clazz, id, projected );
- return entityInfo;
+ return new EntityInfo( clazz, id, projected );
}
- public EntityInfo extract(Hits hits, int index) throws IOException {
- Document doc = hits.doc( index );
+ public EntityInfo extract(int index) throws IOException {
+ Document doc = queryHits.doc( index );
//TODO if we are only looking for score (unlikely), avoid accessing doc (lazy load)
EntityInfo entityInfo = extract( doc );
Object[] eip = entityInfo.projection;
if ( eip != null && eip.length > 0 ) {
- for (int x = 0; x < projection.length; x++) {
+ for ( int x = 0; x < projection.length; x++ ) {
if ( ProjectionConstants.SCORE.equals( projection[x] ) ) {
- eip[x] = hits.score( index );
+ eip[x] = queryHits.score( index );
}
else if ( ProjectionConstants.ID.equals( projection[x] ) ) {
eip[x] = entityInfo.id;
@@ -57,18 +53,18 @@
eip[x] = doc;
}
else if ( ProjectionConstants.DOCUMENT_ID.equals( projection[x] ) ) {
- eip[x] = hits.id( index );
+ eip[x] = queryHits.docId( index );
}
else if ( ProjectionConstants.BOOST.equals( projection[x] ) ) {
eip[x] = doc.getBoost();
}
else if ( ProjectionConstants.EXPLANATION.equals( projection[x] ) ) {
- eip[x] = searcher.explain( preparedQuery, hits.id( index ) );
+ eip[x] = queryHits.explain( index );
}
else if ( ProjectionConstants.THIS.equals( projection[x] ) ) {
//THIS could be projected more than once
//THIS loading delayed to the Loader phase
- entityInfo.indexesOfThis.add(x);
+ entityInfo.indexesOfThis.add( x );
}
}
}
Modified: search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/impl/FullTextSessionImpl.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -89,7 +89,7 @@
/**
* {@inheritDoc}
*/
- public void purgeAll(Class entityType) {
+ public <T> void purgeAll(Class<T> entityType) {
purge( entityType, null );
}
@@ -101,7 +101,7 @@
/**
* {@inheritDoc}
*/
- public void purge(Class<?> entityType, Serializable id) {
+ public <T> void purge(Class<T> entityType, Serializable id) {
if ( entityType == null ) {
return;
}
@@ -115,35 +115,35 @@
throw new IllegalArgumentException( msg );
}
- Work work;
+ Work<T> work;
if ( id == null ) {
// purge the main entity
- work = new Work( entityType, id, WorkType.PURGE_ALL );
+ work = new Work<T>( entityType, id, WorkType.PURGE_ALL );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
// purge the subclasses
Set<Class<?>> subClasses = builder.getMappedSubclasses();
+ Work subClassWork;
for ( Class clazz : subClasses ) {
- work = new Work( clazz, id, WorkType.PURGE_ALL );
- searchFactoryImplementor.getWorker().performWork( work, transactionContext );
+ subClassWork = new Work( clazz, id, WorkType.PURGE_ALL );
+ searchFactoryImplementor.getWorker().performWork( subClassWork, transactionContext );
}
}
else {
- work = new Work( entityType, id, WorkType.PURGE );
+ work = new Work<T>( entityType, id, WorkType.PURGE );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
}
}
/**
- * (re)index an entity.
- * Non indexable entities are ignored
- * The entity must be associated with the session
+ * (Re-)index an entity.
+ * The entity must be associated with the session and non indexable entities are ignored.
*
* @param entity The entity to index - must not be <code>null</code>.
*
* @throws IllegalArgumentException if entity is null or not an @Indexed entity
*/
- public void index(Object entity) {
+ public <T> void index(T entity) {
if ( entity == null ) {
throw new IllegalArgumentException( "Entity to index should not be null" );
}
@@ -157,7 +157,7 @@
throw new IllegalArgumentException( msg );
}
Serializable id = session.getIdentifier( entity );
- Work work = new Work( entity, id, WorkType.INDEX );
+ Work<T> work = new Work<T>( entity, id, WorkType.INDEX );
searchFactoryImplementor.getWorker().performWork( work, transactionContext );
//TODO
Modified: search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextFilterImpl.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -1,8 +1,8 @@
// $Id$
package org.hibernate.search.query;
-import java.util.Map;
import java.util.HashMap;
+import java.util.Map;
import org.hibernate.search.FullTextFilter;
@@ -30,7 +30,6 @@
return parameters.get( name );
}
-
public Map<String, Object> getParameters() {
return parameters;
}
Modified: search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/FullTextQueryImpl.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -18,12 +18,12 @@
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
import org.slf4j.Logger;
import org.hibernate.Criteria;
@@ -132,29 +132,29 @@
return new IteratorImpl( Collections.EMPTY_LIST, noLoader );
}
try {
- QueryAndHits queryAndHits = getQueryAndHits( searcher );
+ QueryHits queryHits = getQueryHits( searcher );
int first = first();
- int max = max( first, queryAndHits.hits );
- Session sess = (Session) this.session;
+ int max = max( first, queryHits.totalHits );
+ Session sess = ( Session ) this.session;
int size = max - first + 1 < 0 ? 0 : max - first + 1;
List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
- DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
- for (int index = first; index <= max; index++) {
+ DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactoryImplementor, indexProjection );
+ for ( int index = first; index <= max; index++ ) {
//TODO use indexSearcher.getIndexReader().document( hits.id(index), FieldSelector(indexProjection) );
- infos.add( extractor.extract( queryAndHits.hits, index ) );
+ infos.add( extractor.extract( index ) );
}
Loader loader = getLoader( sess, searchFactoryImplementor );
return new IteratorImpl( infos, loader );
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new HibernateException( "Unable to query Lucene index", e );
}
finally {
try {
closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
}
- catch (SearchException e) {
+ catch ( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -167,9 +167,11 @@
return loader;
}
if ( criteria != null ) {
- if ( classes.length > 1 ) throw new SearchException( "Cannot mix criteria and multiple entity types" );
+ if ( classes.length > 1 ) {
+ throw new SearchException( "Cannot mix criteria and multiple entity types" );
+ }
if ( criteria instanceof CriteriaImpl ) {
- String targetEntity = ( (CriteriaImpl) criteria ).getEntityOrClassName();
+ String targetEntity = ( ( CriteriaImpl ) criteria ).getEntityOrClassName();
if ( classes.length == 1 && !classes[0].getName().equals( targetEntity ) ) {
throw new SearchException( "Criteria query entity should match query entity" );
}
@@ -178,7 +180,7 @@
Class entityType = ReflectHelper.classForName( targetEntity );
classes = new Class[] { entityType };
}
- catch (ClassNotFoundException e) {
+ catch ( ClassNotFoundException e ) {
throw new SearchException( "Unable to load entity class from criteria: " + targetEntity, e );
}
}
@@ -211,19 +213,21 @@
IndexSearcher searcher = buildSearcher( searchFactory );
//FIXME: handle null searcher
try {
- QueryAndHits queryAndHits = getQueryAndHits( searcher );
+ QueryHits queryHits = getQueryHits( searcher );
int first = first();
- int max = max( first, queryAndHits.hits );
- DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactory, indexProjection );
- Loader loader = getLoader( (Session) this.session, searchFactory );
- return new ScrollableResultsImpl( searcher, queryAndHits.hits, first, max, fetchSize, extractor, loader, searchFactory );
+ int max = max( first, queryHits.totalHits );
+ DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactory, indexProjection );
+ Loader loader = getLoader( ( Session ) this.session, searchFactory );
+ return new ScrollableResultsImpl(
+ searcher, first, max, fetchSize, extractor, loader, searchFactory
+ );
}
- catch (IOException e) {
+ catch ( IOException e ) {
//close only in case of exception
try {
closeSearcher( searcher, searchFactory.getReaderProvider() );
}
- catch (SearchException ee) {
+ catch ( SearchException ee ) {
//we have the initial issue already
}
throw new HibernateException( "Unable to query Lucene index", e );
@@ -239,18 +243,20 @@
SearchFactoryImplementor searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
//find the directories
IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
- if ( searcher == null ) return Collections.EMPTY_LIST;
+ if ( searcher == null ) {
+ return Collections.EMPTY_LIST;
+ }
try {
- QueryAndHits queryAndHits = getQueryAndHits( searcher );
+ QueryHits queryHits = getQueryHits( searcher );
int first = first();
- int max = max( first, queryAndHits.hits );
- Session sess = (Session) this.session;
+ int max = max( first, queryHits.totalHits );
+ Session sess = ( Session ) this.session;
int size = max - first + 1 < 0 ? 0 : max - first + 1;
List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
- DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
- for (int index = first; index <= max; index++) {
- infos.add( extractor.extract( queryAndHits.hits, index ) );
+ DocumentExtractor extractor = new DocumentExtractor( queryHits, searchFactoryImplementor, indexProjection );
+ for ( int index = first; index <= max; index++ ) {
+ infos.add( extractor.extract( index ) );
}
Loader loader = getLoader( sess, searchFactoryImplementor );
List list = loader.load( infos.toArray( new EntityInfo[infos.size()] ) );
@@ -262,14 +268,14 @@
return resultTransformer.transformList( list );
}
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new HibernateException( "Unable to query Lucene index", e );
}
finally {
try {
closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
}
- catch (SearchException e) {
+ catch ( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -279,16 +285,18 @@
Explanation explanation = null;
SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
Searcher searcher = buildSearcher( searchFactoryImplementor );
- if (searcher == null) {
- throw new SearchException("Unable to build explanation for document id:"
- + documentId + ". no index found");
+ if ( searcher == null ) {
+ throw new SearchException(
+ "Unable to build explanation for document id:"
+ + documentId + ". no index found"
+ );
}
try {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
buildFilters();
explanation = searcher.explain( query, documentId );
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new HibernateException( "Unable to query Lucene index and build explanation", e );
}
finally {
@@ -296,7 +304,7 @@
try {
closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
}
- catch (SearchException e) {
+ catch ( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -307,16 +315,17 @@
* Execute the Lucene search and return the matching hits.
*
* @param searcher The index searcher.
- * @return The lucene hits.
+ *
+ * @return An instance of <code>QueryHits</code> wrapping the Lucene query and the matching documents.
+ *
* @throws IOException in case there is an error executing the lucene search.
*/
- private QueryAndHits getQueryAndHits(Searcher searcher) throws IOException {
- Hits hits;
+ private QueryHits getQueryHits(Searcher searcher) throws IOException {
org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
buildFilters();
- hits = searcher.search( query, filter, sort );
- setResultSize( hits );
- return new QueryAndHits( query, hits );
+ QueryHits queryHits = new QueryHits( searcher, query, filter, sort );
+ resultSize = queryHits.totalHits;
+ return queryHits;
}
private void buildFilters() {
@@ -325,12 +334,14 @@
}
ChainedFilter chainedFilter = new ChainedFilter();
- for (FullTextFilterImpl fullTextFilter : filterDefinitions.values()) {
- Filter filter = buildLuceneFilter(fullTextFilter);
+ for ( FullTextFilterImpl fullTextFilter : filterDefinitions.values() ) {
+ Filter filter = buildLuceneFilter( fullTextFilter );
chainedFilter.addFilter( filter );
}
- if ( filter != null ) chainedFilter.addFilter( filter );
+ if ( filter != null ) {
+ chainedFilter.addFilter( filter );
+ }
filter = chainedFilter;
}
@@ -339,6 +350,7 @@
*
* @param fullTextFilter the Hibernate specific <code>FullTextFilter</code> used to create the
* Lucene <code>Filter</code>.
+ *
* @return the Lucene filter mapped to the filter definition
*/
private Filter buildLuceneFilter(FullTextFilterImpl fullTextFilter) {
@@ -350,8 +362,8 @@
* as FilterCachingStrategy ensure a memory barrier between concurrent thread calls
*/
FilterDef def = searchFactoryImplementor.getFilterDefinition( fullTextFilter.getName() );
- Object instance = createFilterInstance(fullTextFilter, def);
- FilterKey key = createFilterKey(def, instance);
+ Object instance = createFilterInstance( fullTextFilter, def );
+ FilterKey key = createFilterKey( def, instance );
// try to get the filter out of the cache
Filter filter = cacheInstance( def.getCacheMode() ) ?
@@ -359,7 +371,7 @@
null;
if ( filter == null ) {
- filter = createFilter(def, instance);
+ filter = createFilter( def, instance );
// add filter to cache if we have to
if ( cacheInstance( def.getCacheMode() ) ) {
@@ -373,33 +385,41 @@
Filter filter = null;
if ( def.getFactoryMethod() != null ) {
try {
- filter = (Filter) def.getFactoryMethod().invoke( instance );
+ filter = ( Filter ) def.getFactoryMethod().invoke( instance );
}
- catch (IllegalAccessException e) {
- throw new SearchException( "Unable to access @Factory method: "
- + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ catch ( IllegalAccessException e ) {
+ throw new SearchException(
+ "Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
}
- catch (InvocationTargetException e) {
- throw new SearchException( "Unable to access @Factory method: "
- + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ catch ( InvocationTargetException e ) {
+ throw new SearchException(
+ "Unable to access @Factory method: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
}
- catch (ClassCastException e) {
- throw new SearchException( "@Key method does not return a org.apache.lucene.search.Filter class: "
- + def.getImpl().getName() + "." + def.getFactoryMethod().getName() );
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "@Key method does not return a org.apache.lucene.search.Filter class: "
+ + def.getImpl().getName() + "." + def.getFactoryMethod().getName()
+ );
}
}
else {
try {
- filter = (Filter) instance;
+ filter = ( Filter ) instance;
}
- catch (ClassCastException e) {
- throw new SearchException( "Filter implementation does not implement the Filter interface: "
- + def.getImpl().getName() + ". "
- + (def.getFactoryMethod() != null ? def.getFactoryMethod().getName() : ""), e );
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "Filter implementation does not implement the Filter interface: "
+ + def.getImpl().getName() + ". "
+ + ( def.getFactoryMethod() != null ? def.getFactoryMethod().getName() : "" ), e
+ );
}
}
- filter = addCachingWrapperFilter(filter, def);
+ filter = addCachingWrapperFilter( filter, def );
return filter;
}
@@ -408,13 +428,14 @@
*
* @param filter the filter which maybe gets wrapped.
* @param def The filter definition used to decide whether wrapping should occur or not.
+ *
* @return The original filter or wrapped filter depending on the information extracted from
- * <code>def</code>.
+ * <code>def</code>.
*/
private Filter addCachingWrapperFilter(Filter filter, FilterDef def) {
if ( cacheResults( def.getCacheMode() ) ) {
int cachingWrapperFilterSize = getSearchFactoryImplementor().getFilterCacheBitResultsSize();
- filter = new org.hibernate.search.filter.CachingWrapperFilter(filter, cachingWrapperFilterSize);
+ filter = new org.hibernate.search.filter.CachingWrapperFilter( filter, cachingWrapperFilterSize );
}
return filter;
@@ -433,27 +454,35 @@
}
public boolean equals(Object obj) {
- if ( !( obj instanceof FilterKey ) ) return false;
- FilterKey that = (FilterKey) obj;
+ if ( !( obj instanceof FilterKey ) ) {
+ return false;
+ }
+ FilterKey that = ( FilterKey ) obj;
return this.getImpl().equals( that.getImpl() );
}
};
}
else {
try {
- key = (FilterKey) def.getKeyMethod().invoke( instance );
+ key = ( FilterKey ) def.getKeyMethod().invoke( instance );
}
- catch (IllegalAccessException e) {
- throw new SearchException( "Unable to access @Key method: "
- + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ catch ( IllegalAccessException e ) {
+ throw new SearchException(
+ "Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
}
- catch (InvocationTargetException e) {
- throw new SearchException( "Unable to access @Key method: "
- + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ catch ( InvocationTargetException e ) {
+ throw new SearchException(
+ "Unable to access @Key method: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
}
- catch (ClassCastException e) {
- throw new SearchException( "@Key method does not return FilterKey: "
- + def.getImpl().getName() + "." + def.getKeyMethod().getName() );
+ catch ( ClassCastException e ) {
+ throw new SearchException(
+ "@Key method does not return FilterKey: "
+ + def.getImpl().getName() + "." + def.getKeyMethod().getName()
+ );
}
}
key.setImpl( def.getImpl() );
@@ -466,28 +495,29 @@
}
private Object createFilterInstance(FullTextFilterImpl fullTextFilter,
- FilterDef def) {
+ FilterDef def) {
Object instance;
try {
instance = def.getImpl().newInstance();
}
- catch (InstantiationException e) {
+ catch ( InstantiationException e ) {
throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
}
- catch (IllegalAccessException e) {
+ catch ( IllegalAccessException e ) {
throw new SearchException( "Unable to create @FullTextFilterDef: " + def.getImpl(), e );
}
- for (Map.Entry<String, Object> entry : fullTextFilter.getParameters().entrySet()) {
+ for ( Map.Entry<String, Object> entry : fullTextFilter.getParameters().entrySet() ) {
def.invoke( entry.getKey(), instance, entry.getValue() );
}
- if ( cacheInstance( def.getCacheMode() ) && def.getKeyMethod() == null && fullTextFilter.getParameters().size() > 0 ) {
+ if ( cacheInstance( def.getCacheMode() ) && def.getKeyMethod() == null && fullTextFilter.getParameters()
+ .size() > 0 ) {
throw new SearchException( "Filter with parameters and no @Key method: " + fullTextFilter.getName() );
}
return instance;
}
private org.apache.lucene.search.Query filterQueryByClasses(org.apache.lucene.search.Query luceneQuery) {
- if ( ! needClassFilterClause ) {
+ if ( !needClassFilterClause ) {
return luceneQuery;
}
else {
@@ -496,7 +526,7 @@
BooleanQuery classFilter = new BooleanQuery();
//annihilate the scoring impact of DocumentBuilder.CLASS_FIELDNAME
classFilter.setBoost( 0 );
- for (Class clazz : classesAndSubclasses) {
+ for ( Class clazz : classesAndSubclasses ) {
Term t = new Term( DocumentBuilder.CLASS_FIELDNAME, clazz.getName() );
TermQuery termQuery = new TermQuery( t );
classFilter.add( termQuery, BooleanClause.Occur.SHOULD );
@@ -508,12 +538,15 @@
}
}
- private int max(int first, Hits hits) {
- return maxResults == null ?
- hits.length() - 1 :
- maxResults + first < hits.length() ?
- first + maxResults - 1 :
- hits.length() - 1;
+ private int max(int first, int totalHits) {
+ if ( maxResults == null ) {
+ return totalHits - 1;
+ }
+ else {
+ return maxResults + first < totalHits ?
+ first + maxResults - 1 :
+ totalHits - 1;
+ }
}
private int first() {
@@ -537,12 +570,15 @@
// empty classes array means search over all indexed entities,
// but we have to make sure there is at least one
if ( builders.isEmpty() ) {
- throw new HibernateException( "There are no mapped entities (don't forget to add @Indexed to at least one class)." );
+ throw new HibernateException(
+ "There are no mapped entities (don't forget to add @Indexed to at least one class)."
+ );
}
- for (DocumentBuilder builder : builders.values()) {
+ for ( DocumentBuilder builder : builders.values() ) {
searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
- final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForAllShards();
+ final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy()
+ .getDirectoryProvidersForAllShards();
populateDirectories( directories, directoryProviders, searchFactoryImplementor );
}
classesAndSubclasses = null;
@@ -550,18 +586,22 @@
else {
Set<Class<?>> involvedClasses = new HashSet<Class<?>>( classes.length );
Collections.addAll( involvedClasses, classes );
- for (Class<?> clazz : classes) {
+ for ( Class<?> clazz : classes ) {
DocumentBuilder<?> builder = builders.get( clazz );
- if ( builder != null ) involvedClasses.addAll( builder.getMappedSubclasses() );
+ if ( builder != null ) {
+ involvedClasses.addAll( builder.getMappedSubclasses() );
+ }
}
- for (Class clazz : involvedClasses) {
+ for ( Class clazz : involvedClasses ) {
DocumentBuilder builder = builders.get( clazz );
//TODO should we rather choose a polymorphic path and allow non mapped entities
- if ( builder == null )
+ if ( builder == null ) {
throw new HibernateException( "Not a mapped entity (don't forget to add @Indexed): " + clazz );
+ }
- final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy().getDirectoryProvidersForAllShards();
+ final DirectoryProvider[] directoryProviders = builder.getDirectoryProviderSelectionStrategy()
+ .getDirectoryProvidersForAllShards();
searcherSimilarity = checkSimilarity( searcherSimilarity, builder );
populateDirectories( directories, directoryProviders, searchFactoryImplementor );
}
@@ -570,33 +610,41 @@
//compute optimization needClassFilterClause
//if at least one DP contains one class that is not part of the targeted classesAndSubclasses we can't optimize
- if ( classesAndSubclasses != null) {
- for (DirectoryProvider dp : directories) {
- final Set<Class<?>> classesInDirectoryProvider = searchFactoryImplementor.getClassesInDirectoryProvider( dp );
+ if ( classesAndSubclasses != null ) {
+ for ( DirectoryProvider dp : directories ) {
+ final Set<Class<?>> classesInDirectoryProvider = searchFactoryImplementor.getClassesInDirectoryProvider(
+ dp
+ );
// if a DP contains only one class, we know for sure it's part of classesAndSubclasses
if ( classesInDirectoryProvider.size() > 1 ) {
//risk of needClassFilterClause
- for (Class clazz : classesInDirectoryProvider) {
- if ( ! classesAndSubclasses.contains( clazz ) ) {
+ for ( Class clazz : classesInDirectoryProvider ) {
+ if ( !classesAndSubclasses.contains( clazz ) ) {
this.needClassFilterClause = true;
break;
}
}
}
- if ( this.needClassFilterClause ) break;
+ if ( this.needClassFilterClause ) {
+ break;
+ }
}
}
//set up the searcher
final DirectoryProvider[] directoryProviders = directories.toArray( new DirectoryProvider[directories.size()] );
- IndexSearcher is = new IndexSearcher( searchFactoryImplementor.getReaderProvider().openReader( directoryProviders ) );
+ IndexSearcher is = new IndexSearcher(
+ searchFactoryImplementor.getReaderProvider().openReader(
+ directoryProviders
+ )
+ );
is.setSimilarity( searcherSimilarity );
return is;
}
private void populateDirectories(List<DirectoryProvider> directories, DirectoryProvider[] directoryProviders,
SearchFactoryImplementor searchFactoryImplementor) {
- for (DirectoryProvider provider : directoryProviders) {
+ for ( DirectoryProvider provider : directoryProviders ) {
if ( !directories.contains( provider ) ) {
directories.add( provider );
}
@@ -608,7 +656,10 @@
similarity = builder.getSimilarity();
}
else if ( !similarity.getClass().equals( builder.getSimilarity().getClass() ) ) {
- throw new HibernateException( "Cannot perform search on two entities with differing Similarity implementations (" + similarity.getClass().getName() + " & " + builder.getSimilarity().getClass().getName() + ")" );
+ throw new HibernateException(
+ "Cannot perform search on two entities with differing Similarity implementations (" + similarity.getClass()
+ .getName() + " & " + builder.getSimilarity().getClass().getName() + ")"
+ );
}
return similarity;
@@ -617,16 +668,11 @@
private void closeSearcher(Searcher searcher, ReaderProvider readerProvider) {
Set<IndexReader> indexReaders = getIndexReaders( searcher );
- for (IndexReader indexReader : indexReaders) {
+ for ( IndexReader indexReader : indexReaders ) {
readerProvider.closeReader( indexReader );
}
}
- private void setResultSize(Hits hits) {
- resultSize = hits.length();
- }
-
-
public int getResultSize() {
if ( resultSize == null ) {
//get result size without object initialization
@@ -636,12 +682,12 @@
resultSize = 0;
}
else {
- Hits hits;
+ TopDocs hits;
try {
- hits = getQueryAndHits( searcher ).hits;
- resultSize = hits.length();
+ hits = getQueryHits( searcher ).topDocs;
+ resultSize = hits.totalHits;
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new HibernateException( "Unable to query Lucene index", e );
}
finally {
@@ -650,7 +696,7 @@
closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
//searchFactoryImplementor.getReaderProvider().closeReader( searcher.getIndexReader() );
}
- catch (SearchException e) {
+ catch ( SearchException e ) {
log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
}
}
@@ -723,7 +769,9 @@
filterDefinitions = new HashMap<String, FullTextFilterImpl>();
}
FullTextFilterImpl filterDefinition = filterDefinitions.get( name );
- if ( filterDefinition != null ) return filterDefinition;
+ if ( filterDefinition != null ) {
+ return filterDefinition;
+ }
filterDefinition = new FullTextFilterImpl();
filterDefinition.setName( name );
@@ -758,14 +806,4 @@
throw new UnsupportedOperationException( "noLoader should not be used" );
}
};
-
- private static class QueryAndHits {
- private QueryAndHits(org.apache.lucene.search.Query preparedQuery, Hits hits) {
- this.preparedQuery = preparedQuery;
- this.hits = hits;
- }
-
- public final org.apache.lucene.search.Query preparedQuery;
- public final Hits hits;
- }
}
Added: search/trunk/src/java/org/hibernate/search/query/QueryHits.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/QueryHits.java (rev 0)
+++ search/trunk/src/java/org/hibernate/search/query/QueryHits.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -0,0 +1,95 @@
+// $Id:$
+/*
+* JBoss, Home of Professional Open Source
+* Copyright 2008, Red Hat Middleware LLC, and individual contributors
+* by the @authors tag. See the copyright.txt in the distribution for a
+* full listing of individual contributors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* http://www.apache.org/licenses/LICENSE-2.0
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.hibernate.search.query;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+
+import org.hibernate.search.SearchException;
+
+/**
+ * A helper class which gives access to the current query and its hits. This class will dynamically
+ * reload the underlying <code>TopDocs</code> if required.
+ *
+ * @author Hardy Ferentschik
+ */
+public class QueryHits {
+
+ private static final int DEFAULT_TOP_DOC_RETRIEVAL_SIZE = 100;
+ public final org.apache.lucene.search.Query preparedQuery;
+ public final Searcher searcher;
+ public final Filter filter;
+ public final Sort sort;
+ public final int totalHits;
+ public TopDocs topDocs;
+
+ public QueryHits(Searcher searcher, org.apache.lucene.search.Query preparedQuery, Filter filter, Sort sort)
+ throws IOException {
+ this.preparedQuery = preparedQuery;
+ this.searcher = searcher;
+ this.filter = filter;
+ this.sort = sort;
+ updateTopDocs( DEFAULT_TOP_DOC_RETRIEVAL_SIZE );
+ totalHits = topDocs.totalHits;
+ }
+
+ public Document doc(int index) throws IOException {
+ return searcher.doc( docId( index ) );
+ }
+
+ public ScoreDoc scoreDoc(int index) throws IOException {
+ if ( index >= totalHits ) {
+ throw new SearchException("Not a valid ScoreDoc index: " + index);
+ }
+
+ // TODO - Is there a better way to get more TopDocs? Get more or less?
+ if ( index >= topDocs.scoreDocs.length ) {
+ updateTopDocs( 2 * index );
+ }
+
+ return topDocs.scoreDocs[index];
+ }
+
+ public int docId(int index) throws IOException {
+ return scoreDoc( index ).doc;
+ }
+
+ public float score(int index) throws IOException {
+ return scoreDoc( index ).score;
+ }
+
+ public Explanation explain(int index) throws IOException {
+ return searcher.explain( preparedQuery, docId( index ) );
+ }
+
+ private void updateTopDocs(int n) throws IOException {
+ if ( sort == null ) {
+ topDocs = searcher.search( preparedQuery, filter, n );
+ }
+ else {
+ topDocs = searcher.search( preparedQuery, filter, n, sort );
+ }
+ }
+}
Property changes on: search/trunk/src/java/org/hibernate/search/query/QueryHits.java
___________________________________________________________________
Name: svn:keywords
+ Id
Modified: search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/java/org/hibernate/search/query/ScrollableResultsImpl.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -15,7 +15,6 @@
import java.util.Map;
import java.util.TimeZone;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.slf4j.Logger;
@@ -52,9 +51,8 @@
*/
public class ScrollableResultsImpl implements ScrollableResults {
private static final Logger log = LoggerFactory.make();
- private final IndexSearcher searcher;
private final SearchFactory searchFactory;
- private final Hits hits;
+ private final IndexSearcher searcher;
private final int first;
private final int max;
private final int fetchSize;
@@ -64,13 +62,11 @@
private final DocumentExtractor documentExtractor;
private final Map<EntityInfo, Object[]> resultContext;
- public ScrollableResultsImpl(
- IndexSearcher searcher, Hits hits, int first, int max, int fetchSize, DocumentExtractor extractor,
+ public ScrollableResultsImpl( IndexSearcher searcher, int first, int max, int fetchSize, DocumentExtractor extractor,
Loader loader, SearchFactory searchFactory
) {
- this.searcher = searcher;
this.searchFactory = searchFactory;
- this.hits = hits;
+ this.searcher = searcher;
this.first = first;
this.max = max;
this.current = first;
@@ -106,12 +102,12 @@
try {
if ( entityInfos[x - first] == null ) {
//FIXME should check that clazz match classes but this complicates a lot the firstResult/maxResult
- entityInfos[x - first] = documentExtractor.extract( hits, x );
+ entityInfos[x - first] = documentExtractor.extract( x );
entityInfosLoaded.add( entityInfos[x - first] );
}
}
catch (IOException e) {
- throw new HibernateException( "Unable to read Lucene hits[" + x + "]", e );
+ throw new HibernateException( "Unable to read Lucene topDocs[" + x + "]", e );
}
}
@@ -165,7 +161,7 @@
* amount positive or negative, we perform the same tests that
* we performed in next() and previous().
*
- * @param i
+ * @param i the scroll distance.
* @return boolean
* @throws HibernateException
*/
Modified: search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/FSDirectoryTest.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -10,10 +10,11 @@
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+
import org.hibernate.Session;
import org.hibernate.search.Environment;
import org.hibernate.search.store.FSDirectoryProvider;
@@ -132,10 +133,11 @@
try {
QueryParser qp = new QueryParser( "id", new StandardAnalyzer() );
Query query = qp.parse( "title:Action OR Abstract:Action" );
- Hits hits = searcher.search( query );
- assertEquals( 2, hits.length() );
- assertTrue( hits.score( 0 ) == 2 * hits.score( 1 ) );
- assertEquals( "Hibernate in Action", hits.doc( 0 ).get( "title" ) );
+ TopDocs hits = searcher.search( query, 1000 );
+ assertEquals( 2, hits.totalHits );
+ assertTrue( hits.scoreDocs[0].score == 2 * hits.scoreDocs[1].score );
+ org.apache.lucene.document.Document doc = searcher.doc( 0 );
+ assertEquals( "Hibernate in Action", doc.get( "title" ) );
}
finally {
searcher.close();
@@ -164,9 +166,10 @@
// ( fails when deleting -concurrently- to IndexSearcher initialization! )
FileHelper.delete(getBaseIndexDir());
TermQuery query = new TermQuery( new Term("title","action") );
- Hits hits = searcher.search( query );
- assertEquals( 1, hits.length() );
- assertEquals( "Hibernate Search in Action", hits.doc( 0 ).get( "title" ) );
+ TopDocs hits = searcher.search( query, 1000 );
+ assertEquals( 1, hits.totalHits );
+ org.apache.lucene.document.Document doc = searcher.doc( 0 );
+ assertEquals( "Hibernate Search in Action", doc.get( "title" ) );
searcher.close();
}
Modified: search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/id/providedId/ProvidedIdTest.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -2,9 +2,9 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
import org.hibernate.Session;
import org.hibernate.Transaction;
@@ -61,12 +61,12 @@
DirectoryProvider provider = fullTextSession.getSearchFactory()
.getDirectoryProviders( ProvidedIdPerson.class )[0];
IndexSearcher searcher = new IndexSearcher( provider.getDirectory() );
- Hits hits = searcher.search( luceneQuery );
+ TopDocs hits = searcher.search( luceneQuery, 1000 );
searcher.close();
transaction.commit();
session.close();
- assertEquals( 3, hits.length() );
+ assertEquals( 3, hits.totalHits );
}
Modified: search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/perf/SearcherThread.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -10,15 +10,18 @@
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.document.Document;
import org.slf4j.Logger;
import org.hibernate.SessionFactory;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
+import org.hibernate.search.SearchException;
import org.hibernate.search.util.LoggerFactory;
/**
@@ -70,12 +73,13 @@
Query q = getQuery();
long start = System.currentTimeMillis();
// Search
- Hits hits = indexsearcher.search( q );
+ TopDocs hits = indexsearcher.search( q, 1000 );
List<String> names = new ArrayList<String>(100);
for (int i = 0 ; i < 100 ; i++) {
- names.add( hits.doc( i ).get( "name" ) );
+ Document doc = getDocument( indexsearcher, hits.scoreDocs[i].doc );
+ names.add( doc.get( "name" ) );
}
- int resultSize = hits.length();
+ int resultSize = hits.totalHits;
long totalTime = System.currentTimeMillis() - start;
// log.error( "Lucene [ Thread-id : " + threadId + " ] Total time taken for search is : " + totalTime + "ms with total no. of matching records : " + hits.length() );
setTime( totalTime );
@@ -92,6 +96,14 @@
}
}
+ private Document getDocument(Searcher searcher, int docId ) {
+ try {
+ return searcher.doc( docId );
+ } catch (IOException ioe) {
+ throw new SearchException( "Unable to retrieve document", ioe );
+ }
+ }
+
private Query getQuery() throws ParseException {
QueryParser qp = new QueryParser( "t", new StandardAnalyzer() );
qp.setLowercaseExpandedTerms( true );
Modified: search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/query/ProjectionQueryTest.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -230,7 +230,7 @@
assertEquals( "dept incorrect", "Accounting", projection[2] );
assertEquals( "THIS incorrect", "Jackson", ( (Employee) projection[3] ).getLastname() );
assertEquals( "THIS incorrect", projection[3], s.get( Employee.class, (Serializable) projection[0] ) );
- assertEquals( "SCORE incorrect", 1.0F, projection[4] );
+ assertEquals( "SCORE incorrect", 1.9162908F, projection[4] );
assertTrue( "DOCUMENT incorrect", projection[5] instanceof Document );
assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[5] ).getFields().size() );
assertEquals( "ID incorrect", 1001, projection[6] );
@@ -249,7 +249,7 @@
assertTrue( "DOCUMENT incorrect", projection[0] instanceof Document );
assertEquals( "DOCUMENT size incorrect", 5, ( (Document) projection[0] ).getFields().size() );
assertEquals( "THIS incorrect", projection[1], s.get( Employee.class, (Serializable) projection[4] ) );
- assertEquals( "SCORE incorrect", 1.0F, projection[2] );
+ assertEquals( "SCORE incorrect", 1.9162908F, projection[2] );
assertNull( "BOOST not removed", projection[3] );
assertEquals( "ID incorrect", 1001, projection[4] );
assertEquals( "id incorrect", 1001, projection[5] );
Modified: search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2008-11-10 18:14:53 UTC (rev 15540)
+++ search/trunk/src/test/org/hibernate/search/test/reader/ReaderPerfTestCase.java 2008-11-10 20:14:05 UTC (rev 15541)
@@ -13,6 +13,8 @@
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
+import org.slf4j.Logger;
+
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
@@ -22,11 +24,15 @@
import org.hibernate.search.store.FSDirectoryProvider;
import org.hibernate.search.test.SearchTestCase;
import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
/**
* @author Emmanuel Bernard
*/
public abstract class ReaderPerfTestCase extends SearchTestCase {
+
+ private static final Logger log = LoggerFactory.make();
+
protected void setUp() throws Exception {
File sub = getBaseIndexDir();
sub.mkdir();
@@ -50,7 +56,9 @@
protected void tearDown() throws Exception {
super.tearDown();
- if ( getSessions() != null ) getSessions().close();
+ if ( getSessions() != null ) {
+ getSessions().close();
+ }
File sub = getBaseIndexDir();
FileHelper.delete( sub );
}
@@ -58,22 +66,24 @@
public boolean insert = true;
public void testConcurrency() throws Exception {
- Session s = openSession( );
+ Session s = openSession();
Transaction tx = s.beginTransaction();
- for ( int index = 0 ; index < 5000 ; index++ ) {
+ for ( int index = 0; index < 5000; index++ ) {
Detective detective = new Detective();
detective.setName( "John Doe " + index );
detective.setBadge( "123455" + index );
- detective.setPhysicalDescription( "Blond green eye etc etc");
+ detective.setPhysicalDescription( "Blond green eye etc etc" );
s.persist( detective );
Suspect suspect = new Suspect();
suspect.setName( "Jane Doe " + index );
- suspect.setPhysicalDescription( "brunette, short, 30-ish");
+ suspect.setPhysicalDescription( "brunette, short, 30-ish" );
if ( index % 20 == 0 ) {
suspect.setSuspectCharge( "thief liar " );
}
else {
- suspect.setSuspectCharge( " It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery. Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud.");
+ suspect.setSuspectCharge(
+ " It's 1875 in London. The police have captured career criminal Montmorency. In the process he has been grievously wounded and it is up to a young surgeon to treat his wounds. During his recovery Montmorency learns of the city's new sewer system and sees in it the perfect underground highway for his thievery. Washington Post columnist John Kelly recommends this title for middle schoolers, especially to be read aloud."
+ );
}
s.persist( suspect );
}
@@ -88,6 +98,7 @@
ReverseWork reverseWork = new ReverseWork( getSessions() );
long start = System.currentTimeMillis();
int iteration = 100;
+ log.info( "Starting worker threads." );
for ( int i = 0; i < iteration; i++ ) {
es.execute( work );
es.execute( reverseWork );
@@ -95,12 +106,11 @@
while ( work.count < iteration - 1 ) {
Thread.sleep( 20 );
}
- System.out.println( iteration + " iterations in " + nThreads + " threads: " + ( System
- .currentTimeMillis() - start ) );
+ log.debug( iteration + " iterations in " + nThreads + " threads: " + ( System.currentTimeMillis() - start ) );
}
protected class Work implements Runnable {
- private Random random = new Random( );
+ private Random random = new Random();
private SessionFactory sf;
public volatile int count = 0;
@@ -112,12 +122,13 @@
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
QueryParser parser = new MultiFieldQueryParser(
- new String[] {"name", "physicalDescription", "suspectCharge"},
- new StandardAnalyzer() );
+ new String[] { "name", "physicalDescription", "suspectCharge" },
+ new StandardAnalyzer()
+ );
FullTextQuery query = getQuery( "John Doe", parser, s );
assertTrue( query.getResultSize() != 0 );
- query = getQuery( "green", parser, s );
+ query = getQuery( "green", parser, s );
random.nextInt( query.getResultSize() - 15 );
query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
@@ -131,18 +142,18 @@
query = getQuery( "John Doe", parser, s );
assertTrue( query.getResultSize() != 0 );
- query = getQuery( "thief", parser, s );
+ query = getQuery( "thief", parser, s );
int firstResult = random.nextInt( query.getResultSize() - 15 );
query.setFirstResult( firstResult );
query.setMaxResults( 10 );
List result = query.list();
- Object object = result.get(0);
- if (insert && object instanceof Detective) {
- Detective detective = (Detective) object;
+ Object object = result.get( 0 );
+ if ( insert && object instanceof Detective ) {
+ Detective detective = ( Detective ) object;
detective.setPhysicalDescription( detective.getPhysicalDescription() + " Eye" + firstResult );
}
- else if (insert && object instanceof Suspect) {
- Suspect suspect = (Suspect) object;
+ else if ( insert && object instanceof Suspect ) {
+ Suspect suspect = ( Suspect ) object;
suspect.setPhysicalDescription( suspect.getPhysicalDescription() + " Eye" + firstResult );
}
tx.commit();
@@ -153,9 +164,9 @@
private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
Query luceneQuery = null;
try {
- luceneQuery = parser.parse(queryString);
+ luceneQuery = parser.parse( queryString );
}
- catch (ParseException e) {
+ catch ( ParseException e ) {
e.printStackTrace();
}
return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );
@@ -174,12 +185,13 @@
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
QueryParser parser = new MultiFieldQueryParser(
- new String[] {"name", "physicalDescription", "suspectCharge"},
- new StandardAnalyzer() );
+ new String[] { "name", "physicalDescription", "suspectCharge" },
+ new StandardAnalyzer()
+ );
FullTextQuery query = getQuery( "John Doe", parser, s );
assertTrue( query.getResultSize() != 0 );
- query = getQuery( "london", parser, s );
+ query = getQuery( "london", parser, s );
random.nextInt( query.getResultSize() - 15 );
query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
@@ -193,7 +205,7 @@
getQuery( "John Doe", parser, s );
assertTrue( query.getResultSize() != 0 );
- query = getQuery( "green", parser, s );
+ query = getQuery( "green", parser, s );
random.nextInt( query.getResultSize() - 15 );
query.setFirstResult( random.nextInt( query.getResultSize() - 15 ) );
query.setMaxResults( 10 );
@@ -205,9 +217,9 @@
private FullTextQuery getQuery(String queryString, QueryParser parser, Session s) {
Query luceneQuery = null;
try {
- luceneQuery = parser.parse(queryString);
+ luceneQuery = parser.parse( queryString );
}
- catch (ParseException e) {
+ catch ( ParseException e ) {
e.printStackTrace();
}
return Search.getFullTextSession( s ).createFullTextQuery( luceneQuery );
15 years, 6 months
Hibernate SVN: r15540 - core/branches/Branch_3_2/src/org/hibernate/hql/ast/tree.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-11-10 13:14:53 -0500 (Mon, 10 Nov 2008)
New Revision: 15540
Modified:
core/branches/Branch_3_2/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
Log:
HHH-530 : problem with parameter-pulling when row value constructor morphed on dialects not supporting the row value constructor syntax
Modified: core/branches/Branch_3_2/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
===================================================================
--- core/branches/Branch_3_2/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 18:14:34 UTC (rev 15539)
+++ core/branches/Branch_3_2/src/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 18:14:53 UTC (rev 15540)
@@ -4,9 +4,9 @@
import org.hibernate.Hibernate;
import org.hibernate.TypeMismatchException;
import org.hibernate.HibernateException;
+import org.hibernate.param.ParameterSpecification;
import org.hibernate.util.StringHelper;
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
-import org.hibernate.dialect.HSQLDialect;
import org.hibernate.engine.SessionFactoryImplementor;
import antlr.SemanticException;
import antlr.collections.AST;
@@ -94,9 +94,18 @@
String[] lhsElementTexts = extractMutationTexts( getLeftHandOperand(), valueElements );
String[] rhsElementTexts = extractMutationTexts( getRightHandOperand(), valueElements );
+ ParameterSpecification lhsEmbeddedCompositeParameterSpecification =
+ getLeftHandOperand() == null || ( !ParameterNode.class.isInstance( getLeftHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getLeftHandOperand() ).getHqlParameterSpecification();
+
+ ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
+ getRightHandOperand() == null || ( !ParameterNode.class.isInstance( getRightHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getRightHandOperand() ).getHqlParameterSpecification();
+
AST container = this;
for ( int i = valueElements - 1; i > 0; i-- ) {
-
if ( i == 1 ) {
AST op1 = getASTFactory().create( comparisonType, comparisonText );
AST lhs1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[0] );
@@ -110,6 +119,16 @@
op2.setFirstChild( lhs2 );
lhs2.setNextSibling( rhs2 );
op1.setNextSibling( op2 );
+
+ // "pass along" our initial embedded parameter node(s) to the first generated
+ // sql fragment so that it can be handled later for parameter binding...
+ SqlFragment fragment = ( SqlFragment ) lhs1;
+ if ( lhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( lhsEmbeddedCompositeParameterSpecification );
+ }
+ if ( rhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( rhsEmbeddedCompositeParameterSpecification );
+ }
}
else {
AST op = getASTFactory().create( comparisonType, comparisonText );
15 years, 6 months
Hibernate SVN: r15539 - core/branches/Branch_3_3/core/src/main/java/org/hibernate/hql/ast/tree.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-11-10 13:14:34 -0500 (Mon, 10 Nov 2008)
New Revision: 15539
Modified:
core/branches/Branch_3_3/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
Log:
HHH-530 : problem with parameter-pulling when row value constructor morphed on dialects not supporting the row value constructor syntax
Modified: core/branches/Branch_3_3/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
===================================================================
--- core/branches/Branch_3_3/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 18:14:18 UTC (rev 15538)
+++ core/branches/Branch_3_3/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 18:14:34 UTC (rev 15539)
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.hql.ast.tree;
@@ -28,6 +27,7 @@
import org.hibernate.Hibernate;
import org.hibernate.TypeMismatchException;
import org.hibernate.HibernateException;
+import org.hibernate.param.ParameterSpecification;
import org.hibernate.util.StringHelper;
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
import org.hibernate.engine.SessionFactoryImplementor;
@@ -117,9 +117,18 @@
String[] lhsElementTexts = extractMutationTexts( getLeftHandOperand(), valueElements );
String[] rhsElementTexts = extractMutationTexts( getRightHandOperand(), valueElements );
+ ParameterSpecification lhsEmbeddedCompositeParameterSpecification =
+ getLeftHandOperand() == null || ( !ParameterNode.class.isInstance( getLeftHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getLeftHandOperand() ).getHqlParameterSpecification();
+
+ ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
+ getRightHandOperand() == null || ( !ParameterNode.class.isInstance( getRightHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getRightHandOperand() ).getHqlParameterSpecification();
+
AST container = this;
for ( int i = valueElements - 1; i > 0; i-- ) {
-
if ( i == 1 ) {
AST op1 = getASTFactory().create( comparisonType, comparisonText );
AST lhs1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[0] );
@@ -133,6 +142,16 @@
op2.setFirstChild( lhs2 );
lhs2.setNextSibling( rhs2 );
op1.setNextSibling( op2 );
+
+ // "pass along" our initial embedded parameter node(s) to the first generated
+ // sql fragment so that it can be handled later for parameter binding...
+ SqlFragment fragment = ( SqlFragment ) lhs1;
+ if ( lhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( lhsEmbeddedCompositeParameterSpecification );
+ }
+ if ( rhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( rhsEmbeddedCompositeParameterSpecification );
+ }
}
else {
AST op = getASTFactory().create( comparisonType, comparisonText );
15 years, 6 months
Hibernate SVN: r15538 - core/trunk/core/src/main/java/org/hibernate/hql/ast/tree.
by hibernate-commits@lists.jboss.org
Author: steve.ebersole(a)jboss.com
Date: 2008-11-10 13:14:18 -0500 (Mon, 10 Nov 2008)
New Revision: 15538
Modified:
core/trunk/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
Log:
HHH-530 : problem with parameter-pulling when row value constructor morphed on dialects not supporting the row value constructor syntax
Modified: core/trunk/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java
===================================================================
--- core/trunk/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 09:25:20 UTC (rev 15537)
+++ core/trunk/core/src/main/java/org/hibernate/hql/ast/tree/BinaryLogicOperatorNode.java 2008-11-10 18:14:18 UTC (rev 15538)
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.hql.ast.tree;
@@ -28,6 +27,7 @@
import org.hibernate.Hibernate;
import org.hibernate.TypeMismatchException;
import org.hibernate.HibernateException;
+import org.hibernate.param.ParameterSpecification;
import org.hibernate.util.StringHelper;
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
import org.hibernate.engine.SessionFactoryImplementor;
@@ -117,9 +117,18 @@
String[] lhsElementTexts = extractMutationTexts( getLeftHandOperand(), valueElements );
String[] rhsElementTexts = extractMutationTexts( getRightHandOperand(), valueElements );
+ ParameterSpecification lhsEmbeddedCompositeParameterSpecification =
+ getLeftHandOperand() == null || ( !ParameterNode.class.isInstance( getLeftHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getLeftHandOperand() ).getHqlParameterSpecification();
+
+ ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
+ getRightHandOperand() == null || ( !ParameterNode.class.isInstance( getRightHandOperand() ) )
+ ? null
+ : ( ( ParameterNode ) getRightHandOperand() ).getHqlParameterSpecification();
+
AST container = this;
for ( int i = valueElements - 1; i > 0; i-- ) {
-
if ( i == 1 ) {
AST op1 = getASTFactory().create( comparisonType, comparisonText );
AST lhs1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, lhsElementTexts[0] );
@@ -133,6 +142,16 @@
op2.setFirstChild( lhs2 );
lhs2.setNextSibling( rhs2 );
op1.setNextSibling( op2 );
+
+ // "pass along" our initial embedded parameter node(s) to the first generated
+ // sql fragment so that it can be handled later for parameter binding...
+ SqlFragment fragment = ( SqlFragment ) lhs1;
+ if ( lhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( lhsEmbeddedCompositeParameterSpecification );
+ }
+ if ( rhsEmbeddedCompositeParameterSpecification != null ) {
+ fragment.addEmbeddedParameter( rhsEmbeddedCompositeParameterSpecification );
+ }
}
else {
AST op = getASTFactory().create( comparisonType, comparisonText );
15 years, 6 months