Author: hardy.ferentschik
Date: 2008-11-05 15:06:43 -0500 (Wed, 05 Nov 2008)
New Revision: 15522
Modified:
search/trunk/src/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java
search/trunk/src/java/org/hibernate/search/util/FileHelper.java
search/trunk/src/test/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java
search/trunk/src/test/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java
search/trunk/src/test/org/hibernate/search/test/util/FileHelperTest.java
Log:
HSEARCH-266
* Added new method areInSync(File, File) to FileHelper
* Modified FSSlaveDirectoryProvider to not copy if the source and destination are still in
sync
* Updated tests
Modified: search/trunk/src/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java
===================================================================
---
search/trunk/src/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java 2008-11-05
20:03:42 UTC (rev 15521)
+++
search/trunk/src/java/org/hibernate/search/store/FSSlaveDirectoryProvider.java 2008-11-05
20:06:43 UTC (rev 15522)
@@ -25,25 +25,25 @@
* The base directory is represented by hibernate.search.<index>.indexBase
* The index is created in <base directory>/<index name>
* The source (aka copy) directory is built from <sourceBase>/<index name>
- *
+ * <p/>
* A copy is triggered every refresh seconds
*
* @author Emmanuel Bernard
* @author Sanne Grinovero
*/
public class FSSlaveDirectoryProvider implements DirectoryProvider<FSDirectory> {
-
+
private static final Logger log = LoggerFactory.make();
private final Timer timer = new Timer( true ); //daemon thread, the copy algorithm is
robust
-
+
private volatile int current; //used also as memory barrier of all other values, which
are set once.
-
+
//variables having visibility granted by a read of "current"
private FSDirectory directory1;
private FSDirectory directory2;
private String indexName;
private long copyChunkSize;
-
+
//variables needed between initialize and start (used by same thread: no special care
needed)
private File sourceIndexDir;
private File indexDir;
@@ -55,7 +55,7 @@
this.directoryProviderName = directoryProviderName;
//source guessing
sourceIndexDir = DirectoryProviderHelper.getSourceDirectory( directoryProviderName,
properties, false );
- if ( ! new File( sourceIndexDir, "current1" ).exists() && ! new File(
sourceIndexDir, "current2" ).exists() ) {
+ if ( !new File( sourceIndexDir, "current1" ).exists() && !new File(
sourceIndexDir, "current2" ).exists() ) {
throw new IllegalStateException( "No current marker in source directory" );
}
log.debug( "Source directory: {}", sourceIndexDir.getPath() );
@@ -64,7 +64,7 @@
try {
indexName = indexDir.getCanonicalPath();
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new SearchException( "Unable to initialize index: " +
directoryProviderName, e );
}
copyChunkSize = DirectoryProviderHelper.getCopyBufferSize( directoryProviderName,
properties );
@@ -90,33 +90,35 @@
}
else {
//no default
- log.debug( "Setting directory 1 as current");
+ log.debug( "Setting directory 1 as current" );
currentToBe = 1;
File destinationFile = new File( indexDir, Integer.valueOf( readCurrentState
).toString() );
int sourceCurrent;
- if ( new File( sourceIndexDir, "current1").exists() ) {
+ if ( new File( sourceIndexDir, "current1" ).exists() ) {
sourceCurrent = 1;
}
- else if ( new File( sourceIndexDir, "current2").exists() ) {
+ else if ( new File( sourceIndexDir, "current2" ).exists() ) {
sourceCurrent = 2;
}
else {
throw new AssertionFailure( "No current file marker found in source directory:
" + sourceIndexDir.getPath() );
}
try {
- FileHelper.synchronize( new File( sourceIndexDir, String.valueOf( sourceCurrent )
),
- destinationFile, true, copyChunkSize );
+ FileHelper.synchronize(
+ new File( sourceIndexDir, String.valueOf( sourceCurrent ) ),
+ destinationFile, true, copyChunkSize
+ );
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new SearchException( "Unable to synchronize directory: " +
indexName, e );
}
- if ( ! currentMarker.createNewFile() ) {
+ if ( !currentMarker.createNewFile() ) {
throw new SearchException( "Unable to create the directory marker file: "
+ indexName );
}
}
- log.debug( "Current directory: {}", currentToBe);
+ log.debug( "Current directory: {}", currentToBe );
}
- catch (IOException e) {
+ catch ( IOException e ) {
throw new SearchException( "Unable to initialize index: " +
directoryProviderName, e );
}
TimerTask task = new TriggerTask( sourceIndexDir, indexDir );
@@ -127,10 +129,10 @@
public FSDirectory getDirectory() {
int readState = current;// to have the read consistent in the next two
"if"s.
- if (readState == 1) {
+ if ( readState == 1 ) {
return directory1;
}
- else if (readState == 2) {
+ else if ( readState == 2 ) {
return directory2;
}
else {
@@ -143,9 +145,13 @@
// this code is actually broken since the value change after initialize call
// but from a practical POV this is fine since we only call this method
// after initialize call
- if ( obj == this ) return true;
- if ( obj == null || !( obj instanceof FSSlaveDirectoryProvider ) ) return false;
- FSSlaveDirectoryProvider other = (FSSlaveDirectoryProvider)obj;
+ if ( obj == this ) {
+ return true;
+ }
+ if ( obj == null || !( obj instanceof FSSlaveDirectoryProvider ) ) {
+ return false;
+ }
+ FSSlaveDirectoryProvider other = ( FSSlaveDirectoryProvider ) obj;
//need to break memory barriers on both instances:
@SuppressWarnings("unused")
int readCurrentState = this.current; //unneded value, but ensure visibility of
indexName
@@ -171,7 +177,7 @@
public TriggerTask(File sourceIndexDir, File destination) {
executor = Executors.newSingleThreadExecutor();
- copyTask = new CopyDirectory( sourceIndexDir, destination );
+ copyTask = new CopyDirectory( sourceIndexDir, destination );
}
public void run() {
@@ -179,10 +185,10 @@
executor.execute( copyTask );
}
else {
- if (log.isTraceEnabled()) {
+ if ( log.isTraceEnabled() ) {
@SuppressWarnings("unused")
int unneeded = current;//ensure visibility of indexName in Timer threads.
- log.trace( "Skipping directory synchronization, previous work still in
progress: {}", indexName);
+ log.trace( "Skipping directory synchronization, previous work still in
progress: {}", indexName );
}
}
}
@@ -201,45 +207,71 @@
public void run() {
long start = System.currentTimeMillis();
try {
- int oldIndex = current;
- int index = oldIndex == 1 ? 2 : 1;
- File sourceFile;
- if ( new File( source, "current1" ).exists() ) {
- sourceFile = new File(source, "1");
- }
- else if ( new File( source, "current2" ).exists() ) {
- sourceFile = new File(source, "2");
- }
- else {
+ File sourceFile = determineCurrentSourceFile();
+ if ( sourceFile == null ) {
log.error( "Unable to determine current in source directory" );
return;
}
+
+ // check whether a copy is needed at all
+ File currentDestinationFile = new File( destination, Integer.valueOf( current
).toString() );
+ try {
+ if ( FileHelper.areInSync( sourceFile, currentDestinationFile ) ) {
+ if ( log.isTraceEnabled() ) {
+ log.trace( "Source and destination directory are in sync. No copying
required." );
+ }
+ return;
+ }
+ }
+ catch ( IOException ioe ) {
+ log.warn( "Unable to compare {} with {}.", sourceFile.getName(),
currentDestinationFile.getName() );
+ }
+
+ // copy is required
+ int oldIndex = current;
+ int index = oldIndex == 1 ? 2 : 1;
File destinationFile = new File( destination, Integer.valueOf( index ).toString() );
try {
log.trace( "Copying {} into {}", sourceFile, destinationFile );
FileHelper.synchronize( sourceFile, destinationFile, true, copyChunkSize );
current = index;
+ log.trace( "Copy for {} took {} ms", indexName, (
System.currentTimeMillis() - start ) );
}
- catch (IOException e) {
+ catch ( IOException e ) {
//don't change current
- log.error( "Unable to synchronize " + indexName, e);
+ log.error( "Unable to synchronize " + indexName, e );
return;
}
- if ( ! new File( indexName, "current" + oldIndex ).delete() ) {
+ if ( !new File( indexName, "current" + oldIndex ).delete() ) {
log.warn( "Unable to remove previous marker file in " + indexName );
}
try {
new File( indexName, "current" + index ).createNewFile();
}
- catch( IOException e ) {
+ catch ( IOException e ) {
log.warn( "Unable to create current marker file in " + indexName, e );
}
}
finally {
inProgress.set( false );
}
- log.trace( "Copy for {} took {} ms", indexName, (System.currentTimeMillis()
- start) );
}
+
+ /**
+ * @return A file pointing to the currently active source directory. Tests for the files
"current1" and
+ * "current2" in order to determine which is the current directory.
If the marker file does not exist,
+ * <code>null</code> is returned.
+ */
+ private File determineCurrentSourceFile() {
+ File sourceFile = null;
+ if ( new File( source, "current1" ).exists() ) {
+ sourceFile = new File( source, "1" );
+ }
+ else if ( new File( source, "current2" ).exists() ) {
+ sourceFile = new File( source, "2" );
+ }
+ return sourceFile;
+ }
}
public void stop() {
@@ -249,13 +281,13 @@
try {
directory1.close();
}
- catch (Exception e) {
+ catch ( Exception e ) {
log.error( "Unable to properly close Lucene directory {}" +
directory1.getFile(), e );
}
try {
directory2.close();
}
- catch (Exception e) {
+ catch ( Exception e ) {
log.error( "Unable to properly close Lucene directory {}" +
directory2.getFile(), e );
}
}
Modified: search/trunk/src/java/org/hibernate/search/util/FileHelper.java
===================================================================
--- search/trunk/src/java/org/hibernate/search/util/FileHelper.java 2008-11-05 20:03:42
UTC (rev 15521)
+++ search/trunk/src/java/org/hibernate/search/util/FileHelper.java 2008-11-05 20:06:43
UTC (rev 15522)
@@ -2,27 +2,75 @@
package org.hibernate.search.util;
import java.io.File;
-import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.util.Arrays;
+import java.util.HashSet;
import java.util.Set;
-import java.util.HashSet;
-import java.util.Arrays;
-import java.nio.channels.FileChannel;
import org.slf4j.Logger;
/**
+ * Utility class for synchronizing files/directories.
+ *
* @author Emmanuel Bernard
* @author Sanne Grinovero
+ * @author Hardy Ferentschik
*/
public abstract class FileHelper {
- private static final Logger log = LoggerFactory.make();
+ private static final Logger log = LoggerFactory.make();
private static final int FAT_PRECISION = 2000;
public static final long DEFAULT_COPY_BUFFER_SIZE = 16 * 1024 * 1024; // 16 MB
-
+
+ public static boolean areInSync(File source, File destination) throws IOException {
+ if ( source.isDirectory() ) {
+ if ( !destination.exists() ) {
+ return false;
+ }
+ else if ( !destination.isDirectory() ) {
+ throw new IOException(
+ "Source and Destination not of the same type:"
+ + source.getCanonicalPath() + " , " + destination.getCanonicalPath()
+ );
+ }
+ String[] sources = source.list();
+ Set<String> srcNames = new HashSet<String>( Arrays.asList( sources ) );
+ String[] dests = destination.list();
+
+ // check for files in destination and not in source
+ for ( String fileName : dests ) {
+ if ( !srcNames.contains( fileName ) ) {
+ return false;
+ }
+ }
+
+ boolean inSync = true;
+ for ( String fileName : sources ) {
+ File srcFile = new File( source, fileName );
+ File destFile = new File( destination, fileName );
+ if ( !areInSync( srcFile, destFile ) ) {
+ inSync = false;
+ break;
+ }
+ }
+ return inSync;
+ }
+ else {
+ if ( destination.exists() && destination.isFile() ) {
+ long sts = source.lastModified() / FAT_PRECISION;
+ long dts = destination.lastModified() / FAT_PRECISION;
+ return sts == dts;
+ }
+ else {
+ return false;
+ }
+ }
+ }
+
public static void synchronize(File source, File destination, boolean smart) throws
IOException {
synchronize( source, destination, smart, DEFAULT_COPY_BUFFER_SIZE );
}
@@ -33,29 +81,31 @@
chunkSize = DEFAULT_COPY_BUFFER_SIZE;
}
if ( source.isDirectory() ) {
- if ( ! destination.exists() ) {
- if ( ! destination.mkdirs() ){
- throw new IOException("Could not create path " + destination);
+ if ( !destination.exists() ) {
+ if ( !destination.mkdirs() ) {
+ throw new IOException( "Could not create path " + destination );
}
}
- else if ( ! destination.isDirectory() ) {
- throw new IOException("Source and Destination not of the same type:"
- + source.getCanonicalPath() + " , " + destination.getCanonicalPath() );
+ else if ( !destination.isDirectory() ) {
+ throw new IOException(
+ "Source and Destination not of the same type:"
+ + source.getCanonicalPath() + " , " + destination.getCanonicalPath()
+ );
}
String[] sources = source.list();
Set<String> srcNames = new HashSet<String>( Arrays.asList( sources ) );
String[] dests = destination.list();
//delete files not present in source
- for (String fileName : dests) {
- if ( ! srcNames.contains( fileName ) ) {
- delete( new File(destination, fileName) );
+ for ( String fileName : dests ) {
+ if ( !srcNames.contains( fileName ) ) {
+ delete( new File( destination, fileName ) );
}
}
//copy each file from source
- for (String fileName : sources) {
- File srcFile = new File(source, fileName);
- File destFile = new File(destination, fileName);
+ for ( String fileName : sources ) {
+ File srcFile = new File( source, fileName );
+ File destFile = new File( destination, fileName );
synchronize( srcFile, destFile, smart, chunkSize );
}
}
@@ -68,11 +118,11 @@
long dts = destination.lastModified() / FAT_PRECISION;
//do not copy if smart and same timestamp and same length
if ( !smart || sts == 0 || sts != dts || source.length() != destination.length() ) {
- copyFile(source, destination, chunkSize);
+ copyFile( source, destination, chunkSize );
}
}
else {
- copyFile(source, destination, chunkSize);
+ copyFile( source, destination, chunkSize );
}
}
}
@@ -81,7 +131,7 @@
FileInputStream is = null;
FileOutputStream os = null;
try {
- is = new FileInputStream(srcFile);
+ is = new FileInputStream( srcFile );
FileChannel iChannel = is.getChannel();
os = new FileOutputStream( destFile, false );
FileChannel oChannel = os.getChannel();
@@ -91,21 +141,26 @@
long iterationBytes = Math.min( todoBytes, chunkSize );
long transferredLength = oChannel.transferFrom( iChannel, doneBytes, iterationBytes
);
if ( iterationBytes != transferredLength ) {
- throw new IOException( "Error during file transfer: expected "
- + iterationBytes + " bytes, only "+ transferredLength + " bytes
copied." );
+ throw new IOException(
+ "Error during file transfer: expected "
+ + iterationBytes + " bytes, only " + transferredLength + " bytes
copied."
+ );
}
doneBytes += transferredLength;
todoBytes -= transferredLength;
}
}
finally {
- if (is != null) is.close();
- if (os != null) os.close();
+ if ( is != null ) {
+ is.close();
+ }
+ if ( os != null ) {
+ os.close();
+ }
}
boolean successTimestampOp = destFile.setLastModified( srcFile.lastModified() );
- if ( ! successTimestampOp ) {
- log.warn( "Could not change timestamp for " + destFile +
- ". Index synchronization may be slow." );
+ if ( !successTimestampOp ) {
+ log.warn( "Could not change timestamp for {}. Index synchronization may be
slow.", destFile );
}
}
@@ -116,10 +171,9 @@
}
}
if ( file.exists() ) {
- if ( ! file.delete() ) {
- log.error( "Could not delete " + file );
+ if ( !file.delete() ) {
+ log.error( "Could not delete {}", file );
}
}
}
-
}
Modified:
search/trunk/src/test/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java
===================================================================
---
search/trunk/src/test/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java 2008-11-05
20:03:42 UTC (rev 15521)
+++
search/trunk/src/test/org/hibernate/search/test/directoryProvider/FSSlaveAndMasterDPTest.java 2008-11-05
20:06:43 UTC (rev 15522)
@@ -7,114 +7,155 @@
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
+import org.slf4j.Logger;
+
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
/**
+ * Test case for master/slave directories.
+ *
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public class FSSlaveAndMasterDPTest extends MultipleSFTestCase {
-
+
+ private static final Logger log = LoggerFactory.make();
+
private static File root;
+
static {
- String buildDir = System.getProperty("build.dir");
- if (buildDir == null) {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
buildDir = ".";
}
- root = new File(buildDir, "lucenedirs");
+ root = new File( buildDir, "lucenedirs" );
+ log.info( "Using {} as test directory.", root.getAbsolutePath() );
}
- @SuppressWarnings( { "PointlessArithmeticExpression" } )
+ /**
+ * The lucene index directory which is shared between master and slave.
+ */
+ private String masterCopy = "/master/copy";
+
+ /**
+ * The lucene index directory which is specific to the master node.
+ */
+ private String masterMain = "/master/main";
+
+ /**
+ * The lucene index directory which is specific to the slave node.
+ */
+ private String slave = "/slave";
+
+ /**
+ * Verifies that copies of the master get properly copied to the slaves.
+ *
+ * @throws Exception in case the test fails.
+ */
public void testProperCopy() throws Exception {
- Session s1 = getSessionFactories()[0].openSession( );
- SnowStorm sn = new SnowStorm();
- sn.setDate( new Date() );
- sn.setLocation( "Dallas, TX, USA");
- FullTextSession fts2 = Search.getFullTextSession( getSessionFactories()[1].openSession(
) );
- QueryParser parser = new QueryParser("id", new StopAnalyzer() );
- List result = fts2.createFullTextQuery( parser.parse( "location:texas" )
).list();
+ // assert that the slave index is empty
+ FullTextSession fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ QueryParser parser = new QueryParser( "id", new StopAnalyzer() );
+ List result = fullTextSession.createFullTextQuery( parser.parse(
"location:texas" ) ).list();
assertEquals( "No copy yet, fresh index expected", 0, result.size() );
+ fullTextSession.close();
+
- s1.persist( sn );
- s1.flush(); //we don' commit so we need to flush manually
+ // create an entity on the master and persist it in order to index it
+ Session session = getMasterSession();
+ SnowStorm sn = new SnowStorm();
+ sn.setDate( new Date() );
+ sn.setLocation( "Dallas, TX, USA" );
+ session.persist( sn );
+ session.flush(); //we don't commit so we need to flush manually
+ session.close();
- fts2.close();
- s1.close();
+ int waitPeriodMilli = 2010; // wait a bit more than 2 refresh periods (one master /
one slave) - 2 * 1 * 1000 + 10
+ Thread.sleep( waitPeriodMilli );
- int waitPeroid = 2 * 1 * 1000 + 10; //wait a bit more than 2 refresh (one master / one
slave)
- Thread.sleep( waitPeroid );
-
- //temp test original
- fts2 = Search.getFullTextSession( getSessionFactories()[0].openSession( ) );
- result = fts2.createFullTextQuery( parser.parse( "location:dallas" )
).list();
+ // assert that the master has indexed the snowstorm
+ fullTextSession = Search.getFullTextSession( getMasterSession() );
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas"
) ).list();
assertEquals( "Original should get one", 1, result.size() );
+ fullTextSession.close();
- fts2 = Search.getFullTextSession( getSessionFactories()[1].openSession( ) );
- result = fts2.createFullTextQuery( parser.parse( "location:dallas" )
).list();
- assertEquals("First copy did not work out", 1, result.size() );
+ // assert that index got copied to the slave as well
+ log.info("Searching slave");
+ fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ result = fullTextSession.createFullTextQuery( parser.parse( "location:dallas"
) ).list();
+ assertEquals( "First copy did not work out", 1, result.size() );
+ fullTextSession.close();
- s1 = getSessionFactories()[0].openSession( );
+ // add a new snowstorm to the master
+ session = getMasterSession();
sn = new SnowStorm();
sn.setDate( new Date() );
- sn.setLocation( "Chennai, India");
+ sn.setLocation( "Chennai, India" );
+ session.persist( sn );
+ session.flush(); //we don't commit so we need to flush manually
+ session.close();
- s1.persist( sn );
- s1.flush(); //we don' commit so we need to flush manually
+ Thread.sleep( waitPeriodMilli ); //wait a bit more than 2 refresh (one master / one
slave)
- fts2.close();
- s1.close();
+ // assert that the new snowstorm made it into the slave
+ fullTextSession = Search.getFullTextSession( getSlaveSession() );
+ result = fullTextSession.createFullTextQuery( parser.parse(
"location:chennai" ) ).list();
+ assertEquals( "Second copy did not work out", 1, result.size() );
+ fullTextSession.close();
- Thread.sleep( waitPeroid ); //wait a bit more than 2 refresh (one master / one slave)
-
- fts2 = Search.getFullTextSession( getSessionFactories()[1].openSession( ) );
- result = fts2.createFullTextQuery( parser.parse( "location:chennai" )
).list();
- assertEquals("Second copy did not work out", 1, result.size() );
-
- s1 = getSessionFactories()[0].openSession( );
+ session = getMasterSession();
sn = new SnowStorm();
sn.setDate( new Date() );
- sn.setLocation( "Melbourne, Australia");
+ sn.setLocation( "Melbourne, Australia" );
+ session.persist( sn );
+ session.flush(); //we don't commit so we need to flush manually
+ session.close();
- s1.persist( sn );
- s1.flush(); //we don' commit so we need to flush manually
+ Thread.sleep( waitPeriodMilli ); //wait a bit more than 2 refresh (one master / one
slave)
- fts2.close();
- s1.close();
+ // once more - assert that the new snowstorm made it into the slave
+ fullTextSession = Search.getFullTextSession( getSessionFactories()[1].openSession() );
+ result = fullTextSession.createFullTextQuery( parser.parse(
"location:melbourne" ) ).list();
+ assertEquals( "Third copy did not work out", 1, result.size() );
- Thread.sleep( waitPeroid ); //wait a bit more than 2 refresh (one master / one slave)
-
- fts2 = Search.getFullTextSession( getSessionFactories()[1].openSession( ) );
- result = fts2.createFullTextQuery( parser.parse( "location:melbourne" )
).list();
- assertEquals("Third copy did not work out", 1, result.size() );
-
- fts2.close();
- //run the searchfactory.close() operations
+ fullTextSession.close();
for ( SessionFactory sf : getSessionFactories() ) {
sf.close();
}
}
+ private Session getMasterSession() {
+ return getSessionFactories()[0].openSession();
+ }
+
+ private Session getSlaveSession() {
+ return getSessionFactories()[1].openSession();
+ }
+
protected void setUp() throws Exception {
root.mkdir();
- File master = new File(root, "master/main");
+ File master = new File( root, masterMain );
master.mkdirs();
- master = new File(root, "master/copy");
+ master = new File( root, masterCopy );
master.mkdirs();
- File slave = new File(root, "slave");
- slave.mkdir();
-
+ File slaveFile = new File( root, slave );
+ slaveFile.mkdir();
+
super.setUp();
}
protected void tearDown() throws Exception {
super.tearDown();
+ log.info( "Deleting test directory {} ", root.getAbsolutePath() );
FileHelper.delete( root );
}
@@ -122,7 +163,6 @@
return 2;
}
- @SuppressWarnings("unchecked")
protected Class[] getMappings() {
return new Class[] {
SnowStorm.class
@@ -131,15 +171,19 @@
protected void configure(Configuration[] cfg) {
//master
- cfg[0].setProperty( "hibernate.search.default.sourceBase",
root.getAbsolutePath() + "/master/copy");
- cfg[0].setProperty( "hibernate.search.default.indexBase",
root.getAbsolutePath() + "/master/main");
- cfg[0].setProperty( "hibernate.search.default.refresh", "1");
//every minute
- cfg[0].setProperty( "hibernate.search.default.directory_provider",
"org.hibernate.search.store.FSMasterDirectoryProvider");
+ cfg[0].setProperty( "hibernate.search.default.sourceBase",
root.getAbsolutePath() + masterCopy );
+ cfg[0].setProperty( "hibernate.search.default.indexBase",
root.getAbsolutePath() + masterMain );
+ cfg[0].setProperty( "hibernate.search.default.refresh", "1" );
//every second
+ cfg[0].setProperty(
+ "hibernate.search.default.directory_provider",
"org.hibernate.search.store.FSMasterDirectoryProvider"
+ );
//slave(s)
- cfg[1].setProperty( "hibernate.search.default.sourceBase",
root.getAbsolutePath() + "/master/copy");
- cfg[1].setProperty( "hibernate.search.default.indexBase",
root.getAbsolutePath() + "/slave");
- cfg[1].setProperty( "hibernate.search.default.refresh", "1");
//every minute
- cfg[1].setProperty( "hibernate.search.default.directory_provider",
"org.hibernate.search.store.FSSlaveDirectoryProvider");
+ cfg[1].setProperty( "hibernate.search.default.sourceBase",
root.getAbsolutePath() + masterCopy );
+ cfg[1].setProperty( "hibernate.search.default.indexBase",
root.getAbsolutePath() + slave );
+ cfg[1].setProperty( "hibernate.search.default.refresh", "1" );
//every second
+ cfg[1].setProperty(
+ "hibernate.search.default.directory_provider",
"org.hibernate.search.store.FSSlaveDirectoryProvider"
+ );
}
}
Modified:
search/trunk/src/test/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java
===================================================================
---
search/trunk/src/test/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java 2008-11-05
20:03:42 UTC (rev 15521)
+++
search/trunk/src/test/org/hibernate/search/test/directoryProvider/MultipleSFTestCase.java 2008-11-05
20:06:43 UTC (rev 15522)
@@ -4,6 +4,7 @@
import java.io.InputStream;
import junit.framework.TestCase;
+
import org.hibernate.SessionFactory;
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;
@@ -12,7 +13,7 @@
/**
* Build multiple session factories from the same set of classes
- * The configuration can be altered overriding the #configure() method
+ * The configuration can be altered overriding {@link #configure}.
*
* @author Emmanuel Bernard
*/
@@ -26,32 +27,40 @@
protected abstract int getSFNbrs();
protected void buildSessionFactories(Class[] classes, String[] packages, String[]
xmlFiles) throws Exception {
- if (sessionFactories == null) sessionFactories = new SessionFactory[ getSFNbrs() ];
- if (cfgs == null) cfgs = new AnnotationConfiguration[ getSFNbrs() ];
- for (SessionFactory sf : sessionFactories ) if ( sf != null ) sf.close();
- for (int sfIndex = 0 ; sfIndex < getSFNbrs() ; sfIndex++ ) {
+ if ( sessionFactories == null ) {
+ sessionFactories = new SessionFactory[getSFNbrs()];
+ }
+ if ( cfgs == null ) {
+ cfgs = new AnnotationConfiguration[getSFNbrs()];
+ }
+ for ( SessionFactory sf : sessionFactories ) {
+ if ( sf != null ) {
+ sf.close();
+ }
+ }
+ for ( int sfIndex = 0; sfIndex < getSFNbrs(); sfIndex++ ) {
cfgs[sfIndex] = new AnnotationConfiguration();
}
configure( cfgs );
- for (int sfIndex = 0 ; sfIndex < getSFNbrs() ; sfIndex++ ) {
+ for ( int sfIndex = 0; sfIndex < getSFNbrs(); sfIndex++ ) {
try {
if ( recreateSchema() ) {
cfgs[sfIndex].setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
}
- for ( int i = 0; i < packages.length; i++ ) {
- cfgs[sfIndex].addPackage( packages[i] );
+ for ( String aPackage : packages ) {
+ cfgs[sfIndex].addPackage( aPackage );
}
- for ( int i = 0; i < classes.length; i++ ) {
- cfgs[sfIndex].addAnnotatedClass( classes[i] );
+ for ( Class aClass : classes ) {
+ cfgs[sfIndex].addAnnotatedClass( aClass );
}
- for ( int i = 0; i < xmlFiles.length; i++ ) {
- InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(
xmlFiles[i] );
+ for ( String xmlFile : xmlFiles ) {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(
xmlFile );
cfgs[sfIndex].addInputStream( is );
}
setDialect( Dialect.getDialect() );
sessionFactories[sfIndex] = cfgs[sfIndex].buildSessionFactory( /*new
TestInterceptor()*/ );
}
- catch (Exception e) {
+ catch ( Exception e ) {
e.printStackTrace();
throw e;
}
@@ -68,11 +77,11 @@
protected abstract Class[] getMappings();
protected String[] getAnnotatedPackages() {
- return new String[]{};
+ return new String[] { };
}
protected String[] getXmlFiles() {
- return new String[]{};
+ return new String[] { };
}
private void setDialect(Dialect dialect) {
@@ -83,7 +92,7 @@
return dialect;
}
- protected abstract void configure(Configuration[] cfg) ;
+ protected abstract void configure(Configuration[] cfg);
protected boolean recreateSchema() {
return true;
Modified: search/trunk/src/test/org/hibernate/search/test/util/FileHelperTest.java
===================================================================
--- search/trunk/src/test/org/hibernate/search/test/util/FileHelperTest.java 2008-11-05
20:03:42 UTC (rev 15521)
+++ search/trunk/src/test/org/hibernate/search/test/util/FileHelperTest.java 2008-11-05
20:06:43 UTC (rev 15522)
@@ -6,35 +6,49 @@
import java.io.IOException;
import junit.framework.TestCase;
+import org.slf4j.Logger;
+
import org.hibernate.search.util.FileHelper;
+import org.hibernate.search.util.LoggerFactory;
/**
* @author Emmanuel Bernard
+ * @author Hardy Ferentschik
*/
public class FileHelperTest extends TestCase {
- public void testTiti() throws Exception {
- File titi = new File("file:/c:/titi", "file:/d:/toito");
- assertFalse ( titi.exists() );
+ private static final Logger log = LoggerFactory.make();
+
+ private static File root;
+
+ static {
+ String buildDir = System.getProperty( "build.dir" );
+ if ( buildDir == null ) {
+ buildDir = ".";
+ }
+ root = new File( buildDir, "filehelper" );
+ log.info( "Using {} as test directory.", root.getAbsolutePath() );
}
- protected void setUp() throws Exception {
- super.setUp();
- File dir = new File("./filehelpersrc");
- dir.mkdir();
- String name = "a";
- createFile( dir, name );
- name = "b";
- createFile( dir, name );
- dir = new File(dir, "subdir");
- dir.mkdir();
- name = "c";
- createFile( dir, name );
+ /**
+ * Source directory
+ */
+ private String srcDir = "filehelpersrc";
+
+ /**
+ * Destination directory
+ */
+ private String destDir = "filehelperdest";
+
+
+ private File createFile(File dir, String name) throws IOException {
+ File file = new File( dir, name );
+ file.createNewFile();
+ writeDummyDataToFile( file );
+ return file;
}
- private void createFile(File dir, String name) throws IOException {
- File a = new File(dir, name);
- a.createNewFile();
- FileOutputStream os = new FileOutputStream( a, false );
+ private void writeDummyDataToFile(File file) throws IOException {
+ FileOutputStream os = new FileOutputStream( file, true );
os.write( 1 );
os.write( 2 );
os.write( 3 );
@@ -44,39 +58,58 @@
protected void tearDown() throws Exception {
super.setUp();
- File dir = new File("./filehelpersrc");
+ File dir = new File( root, srcDir );
FileHelper.delete( dir );
- dir = new File("./filehelperdest");
+ dir = new File( root, destDir );
FileHelper.delete( dir );
}
public void testSynchronize() throws Exception {
- File src = new File("./filehelpersrc");
- File dest = new File("./filehelperdest");
+ // create a src directory structure
+ File src = new File( root, srcDir );
+ src.mkdirs();
+ String name = "a";
+ createFile( src, name );
+ name = "b";
+ createFile( src, name );
+ File subDir = new File( src, "subdir" );
+ subDir.mkdirs();
+ name = "c";
+ createFile( subDir, name );
+
+ // create destination and sync
+ File dest = new File( root, destDir );
+ assertFalse( "Directories should be out of sync", FileHelper.areInSync( src,
dest ) );
FileHelper.synchronize( src, dest, true );
- File test = new File(dest, "b");
- assertTrue( test.exists() );
- test = new File( new File(dest, "subdir"), "c");
- assertTrue( test.exists() );
+ assertTrue( "Directories should be in sync", FileHelper.areInSync( src, dest
) );
+ File destTestFile1 = new File( dest, "b" );
+ assertTrue( destTestFile1.exists() );
+ File destTestFile2 = new File( new File( dest, "subdir" ), "c" );
+ assertTrue( destTestFile2.exists() );
- //change
- Thread.sleep( 2*2000 );
- test = new File( src, "c");
- FileOutputStream os = new FileOutputStream( test, true );
- os.write( 1 );
- os.write( 2 );
- os.write( 3 );
- os.flush();
- os.close();
- File destTest = new File(dest, "c");
- assertNotSame( test.lastModified(), destTest.lastModified() );
+ // create a new file in destination which does not exists in src. should be deleted
after next sync
+ File destTestFile3 = createFile( dest, "foo" );
+
+ // create a file in the src directory and write some data to it
+ File srcTestFile = new File( src, "c" );
+ writeDummyDataToFile( srcTestFile );
+ File destTestFile = new File( dest, "c" );
+ assertNotSame( srcTestFile.lastModified(), destTestFile.lastModified() );
+ assertFalse( "Directories should be out of sync", FileHelper.areInSync( src,
dest ) );
+
FileHelper.synchronize( src, dest, true );
- assertEquals( test.lastModified(), destTest.lastModified() );
- assertEquals( test.length(), destTest.length() );
- //delete file
- test.delete();
+ assertTrue("Directories should be in sync", FileHelper.areInSync( src, dest
));
+ assertEquals( srcTestFile.lastModified(), destTestFile.lastModified() );
+ assertEquals( srcTestFile.length(), destTestFile.length() );
+ assertTrue( destTestFile1.exists() );
+ assertTrue( destTestFile2.exists() );
+ assertTrue( !destTestFile3.exists() );
+
+ // delete src test file
+ srcTestFile.delete();
FileHelper.synchronize( src, dest, true );
- assertTrue( ! destTest.exists() );
+ assertTrue( !destTestFile.exists() );
+ assertTrue("Directories should be in sync", FileHelper.areInSync( src, dest
));
}
}