[infinispan-commits] Infinispan SVN: r126 - in trunk: core/src/main/java/org/infinispan/loader/file and 4 other directories.
infinispan-commits at lists.jboss.org
infinispan-commits at lists.jboss.org
Wed Apr 15 10:34:46 EDT 2009
Author: adriancole
Date: 2009-04-15 10:34:45 -0400 (Wed, 15 Apr 2009)
New Revision: 126
Removed:
trunk/core/src/test/java/org/infinispan/loader/jdbc/
trunk/core/src/test/java/org/infinispan/loader/s3/
Modified:
trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeCacheStoreIntegrationTest.java
trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeLearningTest.java
trunk/core/src/main/java/org/infinispan/loader/file/FileCacheStore.java
trunk/core/src/test/java/org/infinispan/loader/file/FileCacheStoreTest.java
trunk/core/src/test/java/org/infinispan/test/TestingUtil.java
trunk/core/src/test/java/org/infinispan/test/testng/SuiteResourcesAndLogTest.java
Log:
ISPN-52 changed to use basedir from surefire to ensure tests work from aggregator
Modified: trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeCacheStoreIntegrationTest.java
===================================================================
--- trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeCacheStoreIntegrationTest.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeCacheStoreIntegrationTest.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -1,6 +1,7 @@
package org.infinispan.loader.bdbje;
import org.easymock.EasyMock;
+import org.infinispan.container.entries.InternalEntryFactory;
import org.infinispan.loader.BaseCacheStoreTest;
import org.infinispan.loader.CacheLoaderException;
import org.infinispan.loader.CacheStore;
@@ -9,10 +10,13 @@
import org.infinispan.loader.modifications.Remove;
import org.infinispan.loader.modifications.Store;
import org.infinispan.test.TestingUtil;
-import org.infinispan.container.entries.InternalEntryFactory;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import javax.transaction.Transaction;
+import java.io.File;
import java.util.ArrayList;
import java.util.List;
@@ -24,86 +28,97 @@
@Test(groups = "unit", enabled = true, testName = "loader.bdbje.BdbjeCacheStoreIntegrationTest")
public class BdbjeCacheStoreIntegrationTest extends BaseCacheStoreTest {
- protected CacheStore createCacheStore() throws CacheLoaderException {
- CacheStore cs = new BdbjeCacheStore();
- String tmpDir = TestingUtil.TEST_FILES;
- String tmpCLLoc = tmpDir + "/Horizon-BdbjeCacheStoreIntegrationTest";
- TestingUtil.recursiveFileRemove(tmpCLLoc);
+ private String tmpDirectory;
- BdbjeCacheStoreConfig cfg = new BdbjeCacheStoreConfig();
- cfg.setLocation(tmpCLLoc);
- cfg.setPurgeSynchronously(true);
- cs.init(cfg, getCache(), getMarshaller());
- cs.start();
- return cs;
- }
+ @BeforeTest
+ @Parameters({"basedir"})
+ protected void setUpTempDir(String basedir) {
+ tmpDirectory = basedir + TestingUtil.TEST_PATH + File.separator + getClass().getSimpleName();
+ }
- /**
- * this is the same as the superclass, except that it doesn't attempt read-committed
- */
- @Override
- public void testTwoPhaseCommit() throws CacheLoaderException {
- List<Modification> mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Remove("k1"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- cs.prepare(mods, tx, false);
- cs.commit(tx);
+ @AfterTest
+ protected void clearTempDir() {
+ TestingUtil.recursiveFileRemove(tmpDirectory);
+ new File(tmpDirectory).mkdirs();
+ }
- assert cs.load("k2").getValue().equals("v2");
- assert !cs.containsKey("k1");
+ protected CacheStore createCacheStore() throws CacheLoaderException {
+ clearTempDir();
+ CacheStore cs = new BdbjeCacheStore();
+ BdbjeCacheStoreConfig cfg = new BdbjeCacheStoreConfig();
+ cfg.setLocation(tmpDirectory);
+ cfg.setPurgeSynchronously(true);
+ cs.init(cfg, getCache(), getMarshaller());
+ cs.start();
+ return cs;
+ }
- cs.clear();
+ /**
+ * this is the same as the superclass, except that it doesn't attempt read-committed
+ */
+ @Override
+ public void testTwoPhaseCommit() throws CacheLoaderException {
+ List<Modification> mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Remove("k1"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ cs.prepare(mods, tx, false);
+ cs.commit(tx);
- mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Clear());
- mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ assert cs.load("k2").getValue().equals("v2");
+ assert !cs.containsKey("k1");
- cs.prepare(mods, tx, false);
- cs.commit(tx);
+ cs.clear();
- assert !cs.containsKey("k1");
- assert !cs.containsKey("k2");
- assert cs.containsKey("k3");
- }
+ mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Clear());
+ mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
- /**
- * this is the same as the superclass, except that it doesn't attempt read-committed
- */
- @Override
- public void testRollback() throws CacheLoaderException {
+ cs.prepare(mods, tx, false);
+ cs.commit(tx);
- cs.store(InternalEntryFactory.create("old", "old"));
+ assert !cs.containsKey("k1");
+ assert !cs.containsKey("k2");
+ assert cs.containsKey("k3");
+ }
- List<Modification> mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Remove("k1"));
- mods.add(new Remove("old"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- cs.prepare(mods, tx, false);
- cs.rollback(tx);
+ /**
+ * this is the same as the superclass, except that it doesn't attempt read-committed
+ */
+ @Override
+ public void testRollback() throws CacheLoaderException {
- assert !cs.containsKey("k1");
- assert !cs.containsKey("k2");
- assert cs.containsKey("old");
+ cs.store(InternalEntryFactory.create("old", "old"));
- mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Clear());
- mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ List<Modification> mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Remove("k1"));
+ mods.add(new Remove("old"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ cs.prepare(mods, tx, false);
+ cs.rollback(tx);
- cs.prepare(mods, tx, false);
- cs.rollback(tx);
+ assert !cs.containsKey("k1");
+ assert !cs.containsKey("k2");
+ assert cs.containsKey("old");
- assert !cs.containsKey("k1");
- assert !cs.containsKey("k2");
- assert !cs.containsKey("k3");
- assert cs.containsKey("old");
- }
+ mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Clear());
+ mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ cs.prepare(mods, tx, false);
+ cs.rollback(tx);
+
+ assert !cs.containsKey("k1");
+ assert !cs.containsKey("k2");
+ assert !cs.containsKey("k3");
+ assert cs.containsKey("old");
+ }
+
}
Modified: trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeLearningTest.java
===================================================================
--- trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeLearningTest.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/cachestore/bdbje/src/test/java/org/infinispan/loader/bdbje/BdbjeLearningTest.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -7,15 +7,10 @@
import com.sleepycat.collections.StoredMap;
import com.sleepycat.collections.TransactionRunner;
import com.sleepycat.collections.TransactionWorker;
-import com.sleepycat.je.Cursor;
-import com.sleepycat.je.Database;
-import com.sleepycat.je.DatabaseConfig;
-import com.sleepycat.je.DatabaseEntry;
-import com.sleepycat.je.DatabaseException;
-import com.sleepycat.je.Environment;
-import com.sleepycat.je.EnvironmentConfig;
-import com.sleepycat.je.OperationStatus;
+import com.sleepycat.je.*;
import org.easymock.EasyMock;
+import org.infinispan.container.entries.InternalCacheEntry;
+import org.infinispan.container.entries.InternalEntryFactory;
import org.infinispan.loader.CacheLoaderException;
import org.infinispan.loader.modifications.Clear;
import org.infinispan.loader.modifications.Modification;
@@ -24,30 +19,11 @@
import org.infinispan.logging.Log;
import org.infinispan.logging.LogFactory;
import org.infinispan.test.TestingUtil;
-import org.infinispan.container.entries.InternalCacheEntry;
-import org.infinispan.container.entries.InternalEntryFactory;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
+import org.testng.annotations.*;
import javax.transaction.Transaction;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
+import java.io.*;
+import java.util.*;
/**
* Learning tests for SleepyCat JE. Behaviour here is used in BdbjeCacheLoader. When there are upgrades to bdbje, this
@@ -59,635 +35,648 @@
*/
@Test(groups = "unit", enabled = true, testName = "loader.bdbje.BdbjeLearningTest")
public class BdbjeLearningTest {
- String dbHome = TestingUtil.TEST_FILES + "/Horizon-BdbjeLearningTest";
- Environment env;
+ Environment env;
- private static final String CLASS_CATALOG = "java_class_catalog";
- private StoredClassCatalog javaCatalog;
+ private static final String CLASS_CATALOG = "java_class_catalog";
+ private StoredClassCatalog javaCatalog;
- private static final String STORED_ENTRIES = "storedEntriesDb";
- private Database storedEntriesDb;
- private StoredMap<Object, InternalCacheEntry> cacheMap;
+ private static final String STORED_ENTRIES = "storedEntriesDb";
+ private Database storedEntriesDb;
+ private StoredMap<Object, InternalCacheEntry> cacheMap;
+ private String tmpDirectory;
- @BeforeMethod
- public void setUp() throws Exception {
- new File(dbHome).mkdirs();
- System.out.println("Opening environment in: " + dbHome);
+ @BeforeTest
+ @Parameters({"basedir"})
+ protected void setUpTempDir(String basedir) {
+ tmpDirectory = basedir + TestingUtil.TEST_PATH + File.separator + getClass().getSimpleName();
+ }
- EnvironmentConfig envConfig = new EnvironmentConfig();
- envConfig.setTransactional(true);
- envConfig.setAllowCreate(true);
+ @AfterTest
+ protected void clearTempDir() {
+ TestingUtil.recursiveFileRemove(tmpDirectory);
+ new File(tmpDirectory).mkdirs();
+ }
- env = new Environment(new File(dbHome), envConfig);
- DatabaseConfig dbConfig = new DatabaseConfig();
- dbConfig.setTransactional(true);
- dbConfig.setAllowCreate(true);
+ @BeforeMethod
+ public void setUp() throws Exception {
+ new File(tmpDirectory).mkdirs();
+ System.out.println("Opening environment in: " + tmpDirectory);
- Database catalogDb = env.openDatabase(null, CLASS_CATALOG, dbConfig);
+ EnvironmentConfig envConfig = new EnvironmentConfig();
+ envConfig.setTransactional(true);
+ envConfig.setAllowCreate(true);
- javaCatalog = new StoredClassCatalog(catalogDb);
+ env = new Environment(new File(tmpDirectory), envConfig);
+ DatabaseConfig dbConfig = new DatabaseConfig();
+ dbConfig.setTransactional(true);
+ dbConfig.setAllowCreate(true);
- EntryBinding storedEntryKeyBinding =
- new SerialBinding(javaCatalog, Object.class);
- EntryBinding storedEntryValueBinding =
- new SerialBinding(javaCatalog, InternalCacheEntry.class);
+ Database catalogDb = env.openDatabase(null, CLASS_CATALOG, dbConfig);
- storedEntriesDb = env.openDatabase(null, STORED_ENTRIES, dbConfig);
+ javaCatalog = new StoredClassCatalog(catalogDb);
- cacheMap =
- new StoredMap<Object, InternalCacheEntry>(storedEntriesDb,
- storedEntryKeyBinding, storedEntryValueBinding, true);
+ EntryBinding storedEntryKeyBinding =
+ new SerialBinding(javaCatalog, Object.class);
+ EntryBinding storedEntryValueBinding =
+ new SerialBinding(javaCatalog, InternalCacheEntry.class);
+ storedEntriesDb = env.openDatabase(null, STORED_ENTRIES, dbConfig);
- }
+ cacheMap =
+ new StoredMap<Object, InternalCacheEntry>(storedEntriesDb,
+ storedEntryKeyBinding, storedEntryValueBinding, true);
- public void testTransactionWorker() throws Exception {
- TransactionRunner runner = new TransactionRunner(env);
- runner.run(new PopulateDatabase());
- runner.run(new PrintDatabase());
- }
+ }
+ public void testTransactionWorker() throws Exception {
+ TransactionRunner runner = new TransactionRunner(env);
+ runner.run(new PopulateDatabase());
+ runner.run(new PrintDatabase());
- private class PopulateDatabase implements TransactionWorker {
- public void doWork()
- throws Exception {
- }
- }
+ }
- private class PrintDatabase implements TransactionWorker {
- public void doWork()
- throws Exception {
- }
- }
+ private class PopulateDatabase implements TransactionWorker {
+ public void doWork()
+ throws Exception {
+ }
+ }
- @AfterMethod
- public void tearDown() throws Exception {
- storedEntriesDb.close();
- javaCatalog.close();
- env.close();
+ private class PrintDatabase implements TransactionWorker {
+ public void doWork()
+ throws Exception {
+ }
+ }
- TestingUtil.recursiveFileRemove(dbHome);
- }
+ @AfterMethod
+ public void tearDown() throws Exception {
+ storedEntriesDb.close();
+ javaCatalog.close();
+ env.close();
- private void store(InternalCacheEntry se) {
- cacheMap.put(se.getKey(), se);
- }
+ TestingUtil.recursiveFileRemove(tmpDirectory);
+ }
- private InternalCacheEntry load(Object key) {
- InternalCacheEntry s = cacheMap.get(key);
- if (s == null)
- return null;
- if (!s.isExpired())
- return s;
- else
- cacheMap.remove(key);
- return null;
- }
+ private void store(InternalCacheEntry se) {
+ cacheMap.put(se.getKey(), se);
+ }
- private Set loadAll() {
- return new HashSet(cacheMap.values());
- }
- private void purgeExpired() {
- Iterator<Map.Entry<Object, InternalCacheEntry>> i = cacheMap.entrySet().iterator();
- while (i.hasNext()) {
- if (i.next().getValue().isExpired())
- i.remove();
- }
- }
+ private InternalCacheEntry load(Object key) {
+ InternalCacheEntry s = cacheMap.get(key);
+ if (s == null)
+ return null;
+ if (!s.isExpired())
+ return s;
+ else
+ cacheMap.remove(key);
+ return null;
+ }
- private static final Log log = LogFactory.getLog(BdbjeLearningTest.class);
+ private Set loadAll() {
+ return new HashSet(cacheMap.values());
+ }
- private void toStream(OutputStream outputStream) throws CacheLoaderException {
- ObjectOutputStream oos = null;
- Cursor cursor = null;
+ private void purgeExpired() {
+ Iterator<Map.Entry<Object, InternalCacheEntry>> i = cacheMap.entrySet().iterator();
+ while (i.hasNext()) {
+ if (i.next().getValue().isExpired())
+ i.remove();
+ }
+ }
- try {
- oos = (outputStream instanceof ObjectOutputStream) ? (ObjectOutputStream) outputStream :
- new ObjectOutputStream(outputStream);
- long recordCount = storedEntriesDb.count();
- log.trace("writing {0} records to stream", recordCount);
- oos.writeLong(recordCount);
+ private static final Log log = LogFactory.getLog(BdbjeLearningTest.class);
- cursor = storedEntriesDb.openCursor(null, null);
- DatabaseEntry key = new DatabaseEntry();
- DatabaseEntry data = new DatabaseEntry();
- while (cursor.getNext(key, data, null) ==
- OperationStatus.SUCCESS) {
- oos.writeObject(key.getData());
- oos.writeObject(data.getData());
- }
- } catch (IOException e) {
- throw new CacheLoaderException("Error writing to object stream", e);
- } catch (DatabaseException e) {
- throw new CacheLoaderException("Error accessing database", e);
- }
- finally {
- if (cursor != null) try {
- cursor.close();
- } catch (DatabaseException e) {
- throw new CacheLoaderException("Error closing cursor", e);
- }
- }
+ private void toStream(OutputStream outputStream) throws CacheLoaderException {
+ ObjectOutputStream oos = null;
+ Cursor cursor = null;
- }
+ try {
+ oos = (outputStream instanceof ObjectOutputStream) ? (ObjectOutputStream) outputStream :
+ new ObjectOutputStream(outputStream);
+ long recordCount = storedEntriesDb.count();
+ log.trace("writing {0} records to stream", recordCount);
+ oos.writeLong(recordCount);
- private void fromStream(InputStream inputStream) throws CacheLoaderException {
- ObjectInputStream ois = null;
- try {
- ois = (inputStream instanceof ObjectInputStream) ? (ObjectInputStream) inputStream :
- new ObjectInputStream(inputStream);
- long recordCount = ois.readLong();
- log.info("reading {0} records from stream", recordCount);
- log.info("clearing all records");
- cacheMap.clear();
- Cursor cursor = null;
- com.sleepycat.je.Transaction txn = env.beginTransaction(null, null);
- try {
- cursor = storedEntriesDb.openCursor(txn, null);
- for (int i = 0; i < recordCount; i++) {
- byte[] keyBytes = (byte[]) ois.readObject();
- byte[] dataBytes = (byte[]) ois.readObject();
-
- DatabaseEntry key = new DatabaseEntry(keyBytes);
- DatabaseEntry data = new DatabaseEntry(dataBytes);
- cursor.put(key, data);
+ cursor = storedEntriesDb.openCursor(null, null);
+ DatabaseEntry key = new DatabaseEntry();
+ DatabaseEntry data = new DatabaseEntry();
+ while (cursor.getNext(key, data, null) ==
+ OperationStatus.SUCCESS) {
+ oos.writeObject(key.getData());
+ oos.writeObject(data.getData());
}
- cursor.close();
- cursor = null;
- txn.commit();
- } finally {
- if (cursor != null) cursor.close();
- }
+ } catch (IOException e) {
+ throw new CacheLoaderException("Error writing to object stream", e);
+ } catch (DatabaseException e) {
+ throw new CacheLoaderException("Error accessing database", e);
+ }
+ finally {
+ if (cursor != null) try {
+ cursor.close();
+ } catch (DatabaseException e) {
+ throw new CacheLoaderException("Error closing cursor", e);
+ }
+ }
- }
- catch (Exception e) {
- CacheLoaderException cle = (e instanceof CacheLoaderException) ? (CacheLoaderException) e :
- new CacheLoaderException("Problems reading from stream", e);
- throw cle;
- }
- }
+ }
- class StoreTransactionWorker implements TransactionWorker {
- StoreTransactionWorker(InternalCacheEntry entry) {
- this.entry = entry;
- }
+ private void fromStream(InputStream inputStream) throws CacheLoaderException {
+ ObjectInputStream ois = null;
+ try {
+ ois = (inputStream instanceof ObjectInputStream) ? (ObjectInputStream) inputStream :
+ new ObjectInputStream(inputStream);
+ long recordCount = ois.readLong();
+ log.info("reading {0} records from stream", recordCount);
+ log.info("clearing all records");
+ cacheMap.clear();
+ Cursor cursor = null;
+ com.sleepycat.je.Transaction txn = env.beginTransaction(null, null);
+ try {
+ cursor = storedEntriesDb.openCursor(txn, null);
+ for (int i = 0; i < recordCount; i++) {
+ byte[] keyBytes = (byte[]) ois.readObject();
+ byte[] dataBytes = (byte[]) ois.readObject();
- private InternalCacheEntry entry;
+ DatabaseEntry key = new DatabaseEntry(keyBytes);
+ DatabaseEntry data = new DatabaseEntry(dataBytes);
+ cursor.put(key, data);
+ }
+ cursor.close();
+ cursor = null;
+ txn.commit();
+ } finally {
+ if (cursor != null) cursor.close();
+ }
- public void doWork() throws Exception {
- store(entry);
- }
- }
+ }
+ catch (Exception e) {
+ CacheLoaderException cle = (e instanceof CacheLoaderException) ? (CacheLoaderException) e :
+ new CacheLoaderException("Problems reading from stream", e);
+ throw cle;
+ }
+ }
- class ClearTransactionWorker implements TransactionWorker {
+ class StoreTransactionWorker implements TransactionWorker {
+ StoreTransactionWorker(InternalCacheEntry entry) {
+ this.entry = entry;
+ }
- public void doWork() throws Exception {
- cacheMap.clear();
- }
- }
+ private InternalCacheEntry entry;
- class RemoveTransactionWorker implements TransactionWorker {
- RemoveTransactionWorker(Object key) {
- this.key = key;
- }
+ public void doWork() throws Exception {
+ store(entry);
+ }
+ }
- Object key;
+ class ClearTransactionWorker implements TransactionWorker {
- public void doWork() throws Exception {
- cacheMap.remove(key);
- }
- }
+ public void doWork() throws Exception {
+ cacheMap.clear();
+ }
+ }
- class PurgeExpiredTransactionWorker implements TransactionWorker {
- public void doWork() throws Exception {
- purgeExpired();
- }
- }
+ class RemoveTransactionWorker implements TransactionWorker {
+ RemoveTransactionWorker(Object key) {
+ this.key = key;
+ }
- class ModificationsTransactionWorker implements TransactionWorker {
- private List<? extends Modification> mods;
+ Object key;
- ModificationsTransactionWorker(List<? extends Modification> mods) {
- this.mods = mods;
- }
+ public void doWork() throws Exception {
+ cacheMap.remove(key);
+ }
+ }
- public void doWork() throws Exception {
- for (Modification modification : mods)
- switch (modification.getType()) {
- case STORE:
- Store s = (Store) modification;
- store(s.getStoredEntry());
- break;
- case CLEAR:
- cacheMap.clear();
- break;
- case REMOVE:
- Remove r = (Remove) modification;
- cacheMap.remove(r.getKey());
- break;
- case PURGE_EXPIRED:
- purgeExpired();
- break;
- default:
- throw new IllegalArgumentException("Unknown modification type " + modification.getType());
- }
- }
- }
+ class PurgeExpiredTransactionWorker implements TransactionWorker {
+ public void doWork() throws Exception {
+ purgeExpired();
+ }
+ }
+ class ModificationsTransactionWorker implements TransactionWorker {
+ private List<? extends Modification> mods;
- private void prepare(List<Modification> mods, Transaction tx, boolean isOnePhase) throws CacheLoaderException {
- if (isOnePhase) {
- TransactionRunner runner = new TransactionRunner(env);
- try {
- runner.run(new ModificationsTransactionWorker(mods));
- } catch (Exception e) {
- e.printStackTrace();
- }
- } else {
- PreparableTransactionRunner runner = new PreparableTransactionRunner(env);
- com.sleepycat.je.Transaction txn = null;
- try {
- runner.prepare(new ModificationsTransactionWorker(mods));
- txn = CurrentTransaction.getInstance(env).getTransaction();
- txnMap.put(tx, txn);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
+ ModificationsTransactionWorker(List<? extends Modification> mods) {
+ this.mods = mods;
+ }
- }
+ public void doWork() throws Exception {
+ for (Modification modification : mods)
+ switch (modification.getType()) {
+ case STORE:
+ Store s = (Store) modification;
+ store(s.getStoredEntry());
+ break;
+ case CLEAR:
+ cacheMap.clear();
+ break;
+ case REMOVE:
+ Remove r = (Remove) modification;
+ cacheMap.remove(r.getKey());
+ break;
+ case PURGE_EXPIRED:
+ purgeExpired();
+ break;
+ default:
+ throw new IllegalArgumentException("Unknown modification type " + modification.getType());
+ }
+ }
+ }
- Map<Transaction, com.sleepycat.je.Transaction> txnMap = new HashMap<Transaction, com.sleepycat.je.Transaction>();
- private void commit(Transaction tx) {
- com.sleepycat.je.Transaction txn = txnMap.remove(tx);
- CurrentTransaction currentTransaction = CurrentTransaction.getInstance(env);
- if (txn != null) {
- if (currentTransaction.getTransaction() == txn) {
+ private void prepare(List<Modification> mods, Transaction tx, boolean isOnePhase) throws CacheLoaderException {
+ if (isOnePhase) {
+ TransactionRunner runner = new TransactionRunner(env);
try {
- currentTransaction.commitTransaction();
- } catch (DatabaseException e) {
- e.printStackTrace();
+ runner.run(new ModificationsTransactionWorker(mods));
+ } catch (Exception e) {
+ e.printStackTrace();
}
- } else {
- log.error("Transactions must be committed on the same thread");
- }
- }
- }
-
- private void rollback(Transaction tx) {
- com.sleepycat.je.Transaction txn = txnMap.remove(tx);
- CurrentTransaction currentTransaction = CurrentTransaction.getInstance(env);
- if (txn != null) {
- if (currentTransaction.getTransaction() == txn) {
+ } else {
+ PreparableTransactionRunner runner = new PreparableTransactionRunner(env);
+ com.sleepycat.je.Transaction txn = null;
try {
- currentTransaction.abortTransaction();
- } catch (DatabaseException e) {
- e.printStackTrace();
+ runner.prepare(new ModificationsTransactionWorker(mods));
+ txn = CurrentTransaction.getInstance(env).getTransaction();
+ txnMap.put(tx, txn);
+ } catch (Exception e) {
+ e.printStackTrace();
}
- } else {
- log.error("Transactions must be committed on the same thread");
- }
- }
- }
+ }
- public void testLoadAndStore() throws InterruptedException, CacheLoaderException {
- assert !cacheMap.containsKey("k");
- InternalCacheEntry se = InternalEntryFactory.create("k", "v");
- store(se);
+ }
- assert load("k").getValue().equals("v");
- assert load("k").getLifespan() == -1;
- assert !load("k").isExpired();
- assert cacheMap.containsKey("k");
+ Map<Transaction, com.sleepycat.je.Transaction> txnMap = new HashMap<Transaction, com.sleepycat.je.Transaction>();
- long lifespan = 120000;
- se = InternalEntryFactory.create("k", "v", lifespan);
- store(se);
+ private void commit(Transaction tx) {
+ com.sleepycat.je.Transaction txn = txnMap.remove(tx);
+ CurrentTransaction currentTransaction = CurrentTransaction.getInstance(env);
+ if (txn != null) {
+ if (currentTransaction.getTransaction() == txn) {
+ try {
+ currentTransaction.commitTransaction();
+ } catch (DatabaseException e) {
+ e.printStackTrace();
+ }
+ } else {
+ log.error("Transactions must be committed on the same thread");
+ }
+ }
+ }
- assert load("k").getValue().equals("v");
- assert load("k").getLifespan() == lifespan;
- assert !load("k").isExpired();
- assert cacheMap.containsKey("k");
+ private void rollback(Transaction tx) {
+ com.sleepycat.je.Transaction txn = txnMap.remove(tx);
+ CurrentTransaction currentTransaction = CurrentTransaction.getInstance(env);
+ if (txn != null) {
+ if (currentTransaction.getTransaction() == txn) {
+ try {
+ currentTransaction.abortTransaction();
+ } catch (DatabaseException e) {
+ e.printStackTrace();
+ }
+ } else {
+ log.error("Transactions must be committed on the same thread");
+ }
+ }
+ }
- lifespan = 1;
- se = InternalEntryFactory.create("k", "v", lifespan);
- store(se);
- Thread.sleep(100);
- assert se.isExpired();
- assert load("k") == null;
- assert !cacheMap.containsKey("k");
- }
+ public void testLoadAndStore() throws InterruptedException, CacheLoaderException {
+ assert !cacheMap.containsKey("k");
+ InternalCacheEntry se = InternalEntryFactory.create("k", "v");
+ store(se);
+ assert load("k").getValue().equals("v");
+ assert load("k").getLifespan() == -1;
+ assert !load("k").isExpired();
+ assert cacheMap.containsKey("k");
- public void testOnePhaseCommit() throws CacheLoaderException {
- List<Modification> mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Remove("k1"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- prepare(mods, tx, true);
+ long lifespan = 120000;
+ se = InternalEntryFactory.create("k", "v", lifespan);
+ store(se);
- Set s = loadAll();
+ assert load("k").getValue().equals("v");
+ assert load("k").getLifespan() == lifespan;
+ assert !load("k").isExpired();
+ assert cacheMap.containsKey("k");
- assert load("k2").getValue().equals("v2");
- assert !cacheMap.containsKey("k1");
+ lifespan = 1;
+ se = InternalEntryFactory.create("k", "v", lifespan);
+ store(se);
+ Thread.sleep(100);
+ assert se.isExpired();
+ assert load("k") == null;
+ assert !cacheMap.containsKey("k");
+ }
- cacheMap.clear();
- mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Clear());
- mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ public void testOnePhaseCommit() throws CacheLoaderException {
+ List<Modification> mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Remove("k1"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ prepare(mods, tx, true);
- prepare(mods, tx, true);
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert cacheMap.containsKey("k3");
- }
+ Set s = loadAll();
+ assert load("k2").getValue().equals("v2");
+ assert !cacheMap.containsKey("k1");
- public void testTwoPhaseCommit() throws Throwable {
- final List<Throwable> throwables = new ArrayList<Throwable>();
- List<Modification> mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Remove("k1"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- prepare(mods, tx, false);
+ cacheMap.clear();
+ mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Clear());
+ mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
- Thread gets1 = new Thread(
- new Runnable() {
- public void run() {
- try {
- assert load("k2").getValue().equals("v2");
- assert !cacheMap.containsKey("k1");
- } catch (Throwable e) {
- throwables.add(e);
- }
- }
- }
- );
+ prepare(mods, tx, true);
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert cacheMap.containsKey("k3");
+ }
- gets1.start();
- commit(tx);
- gets1.join();
+ public void testTwoPhaseCommit() throws Throwable {
+ final List<Throwable> throwables = new ArrayList<Throwable>();
+ List<Modification> mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Remove("k1"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ prepare(mods, tx, false);
- if (!throwables.isEmpty()) throw throwables.get(0);
+ Thread gets1 = new Thread(
+ new Runnable() {
+ public void run() {
+ try {
+ assert load("k2").getValue().equals("v2");
+ assert !cacheMap.containsKey("k1");
+ } catch (Throwable e) {
+ throwables.add(e);
+ }
+ }
+ }
+ );
- cacheMap.clear();
+ gets1.start();
+ commit(tx);
- mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Clear());
- mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ gets1.join();
- prepare(mods, tx, false);
+ if (!throwables.isEmpty()) throw throwables.get(0);
- Thread gets2 = new Thread(
- new Runnable() {
- public void run() {
- try {
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert cacheMap.containsKey("k3");
- } catch (Throwable e) {
- throwables.add(e);
- }
- }
- }
- );
+ cacheMap.clear();
- gets2.start();
+ mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Clear());
+ mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
+ prepare(mods, tx, false);
- commit(tx);
- gets2.join();
+ Thread gets2 = new Thread(
+ new Runnable() {
+ public void run() {
+ try {
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert cacheMap.containsKey("k3");
- if (!throwables.isEmpty()) throw throwables.get(0);
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert cacheMap.containsKey("k3");
- }
+ } catch (Throwable e) {
+ throwables.add(e);
+ }
+ }
+ }
+ );
+ gets2.start();
- public void testRollback() throws Throwable {
- store(InternalEntryFactory.create("old", "old"));
+ commit(tx);
+ gets2.join();
+ if (!throwables.isEmpty()) throw throwables.get(0);
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert cacheMap.containsKey("k3");
+ }
- List<Modification> mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Remove("k1"));
- mods.add(new Remove("old"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- prepare(mods, tx, false);
- rollback(tx);
+ public void testRollback() throws Throwable {
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert cacheMap.containsKey("old");
+ store(InternalEntryFactory.create("old", "old"));
- mods = new ArrayList<Modification>();
- mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
- mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
- mods.add(new Clear());
- mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
- prepare(mods, tx, false);
+ List<Modification> mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Remove("k1"));
+ mods.add(new Remove("old"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ prepare(mods, tx, false);
- rollback(tx);
+ rollback(tx);
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert !cacheMap.containsKey("k3");
- assert cacheMap.containsKey("old");
- }
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert cacheMap.containsKey("old");
+ mods = new ArrayList<Modification>();
+ mods.add(new Store(InternalEntryFactory.create("k1", "v1")));
+ mods.add(new Store(InternalEntryFactory.create("k2", "v2")));
+ mods.add(new Clear());
+ mods.add(new Store(InternalEntryFactory.create("k3", "v3")));
- public void testCommitAndRollbackWithoutPrepare() throws CacheLoaderException {
- store(InternalEntryFactory.create("old", "old"));
- Transaction tx = EasyMock.createNiceMock(Transaction.class);
- commit(tx);
- store(InternalEntryFactory.create("old", "old"));
- rollback(tx);
+ prepare(mods, tx, false);
- assert cacheMap.containsKey("old");
- }
+ rollback(tx);
- public void testPreload() throws CacheLoaderException {
- store(InternalEntryFactory.create("k1", "v1"));
- store(InternalEntryFactory.create("k2", "v2"));
- store(InternalEntryFactory.create("k3", "v3"));
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert !cacheMap.containsKey("k3");
+ assert cacheMap.containsKey("old");
+ }
- Set<InternalCacheEntry> set = loadAll();
- assert set.size() == 3;
- Set expected = new HashSet();
- expected.add("k1");
- expected.add("k2");
- expected.add("k3");
- for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
- assert expected.isEmpty();
- }
+ public void testCommitAndRollbackWithoutPrepare() throws CacheLoaderException {
+ store(InternalEntryFactory.create("old", "old"));
+ Transaction tx = EasyMock.createNiceMock(Transaction.class);
+ commit(tx);
+ store(InternalEntryFactory.create("old", "old"));
+ rollback(tx);
- public void testPurgeExpired() throws Exception {
- long now = System.currentTimeMillis();
- long lifespan = 1000;
- store(InternalEntryFactory.create("k1", "v1", lifespan));
- store(InternalEntryFactory.create("k2", "v2", lifespan));
- store(InternalEntryFactory.create("k3", "v3", lifespan));
+ assert cacheMap.containsKey("old");
+ }
- assert cacheMap.containsKey("k1");
- assert cacheMap.containsKey("k2");
- assert cacheMap.containsKey("k3");
- Thread.sleep(lifespan + 100);
- purgeExpired();
- assert !cacheMap.containsKey("k1");
- assert !cacheMap.containsKey("k2");
- assert !cacheMap.containsKey("k3");
- }
+ public void testPreload() throws CacheLoaderException {
+ store(InternalEntryFactory.create("k1", "v1"));
+ store(InternalEntryFactory.create("k2", "v2"));
+ store(InternalEntryFactory.create("k3", "v3"));
+ Set<InternalCacheEntry> set = loadAll();
- public void testStreamingAPI() throws IOException, ClassNotFoundException, CacheLoaderException {
- store(InternalEntryFactory.create("k1", "v1"));
- store(InternalEntryFactory.create("k2", "v2"));
- store(InternalEntryFactory.create("k3", "v3"));
+ assert set.size() == 3;
+ Set expected = new HashSet();
+ expected.add("k1");
+ expected.add("k2");
+ expected.add("k3");
+ for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
+ assert expected.isEmpty();
+ }
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- toStream(out);
- out.close();
- cacheMap.clear();
- fromStream(new ByteArrayInputStream(out.toByteArray()));
+ public void testPurgeExpired() throws Exception {
+ long now = System.currentTimeMillis();
+ long lifespan = 1000;
+ store(InternalEntryFactory.create("k1", "v1", lifespan));
+ store(InternalEntryFactory.create("k2", "v2", lifespan));
+ store(InternalEntryFactory.create("k3", "v3", lifespan));
- Set<InternalCacheEntry> set = loadAll();
+ assert cacheMap.containsKey("k1");
+ assert cacheMap.containsKey("k2");
+ assert cacheMap.containsKey("k3");
+ Thread.sleep(lifespan + 100);
+ purgeExpired();
+ assert !cacheMap.containsKey("k1");
+ assert !cacheMap.containsKey("k2");
+ assert !cacheMap.containsKey("k3");
+ }
- assert set.size() == 3;
- Set expected = new HashSet();
- expected.add("k1");
- expected.add("k2");
- expected.add("k3");
- for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
- assert expected.isEmpty();
- }
+ public void testStreamingAPI() throws IOException, ClassNotFoundException, CacheLoaderException {
+ store(InternalEntryFactory.create("k1", "v1"));
+ store(InternalEntryFactory.create("k2", "v2"));
+ store(InternalEntryFactory.create("k3", "v3"));
- public void testStreamingAPIReusingStreams() throws IOException, ClassNotFoundException, CacheLoaderException {
- store(InternalEntryFactory.create("k1", "v1"));
- store(InternalEntryFactory.create("k2", "v2"));
- store(InternalEntryFactory.create("k3", "v3"));
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ toStream(out);
+ out.close();
+ cacheMap.clear();
+ fromStream(new ByteArrayInputStream(out.toByteArray()));
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- byte[] dummyStartBytes = {1, 2, 3, 4, 5, 6, 7, 8};
- byte[] dummyEndBytes = {8, 7, 6, 5, 4, 3, 2, 1};
- out.write(dummyStartBytes);
- toStream(out);
- out.write(dummyEndBytes);
- out.close();
- cacheMap.clear();
+ Set<InternalCacheEntry> set = loadAll();
- // first pop the start bytes
- byte[] dummy = new byte[8];
- ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
- int bytesRead = in.read(dummy, 0, 8);
- assert bytesRead == 8;
- for (int i = 1; i < 9; i++) assert dummy[i - 1] == i : "Start byte stream corrupted!";
- fromStream(in);
- bytesRead = in.read(dummy, 0, 8);
- assert bytesRead == 8;
- for (int i = 8; i > 0; i--) assert dummy[8 - i] == i : "Start byte stream corrupted!";
+ assert set.size() == 3;
+ Set expected = new HashSet();
+ expected.add("k1");
+ expected.add("k2");
+ expected.add("k3");
+ for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
+ assert expected.isEmpty();
+ }
- Set<InternalCacheEntry> set = loadAll();
- assert set.size() == 3;
- Set expected = new HashSet();
- expected.add("k1");
- expected.add("k2");
- expected.add("k3");
- for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
- assert expected.isEmpty();
- }
+ public void testStreamingAPIReusingStreams() throws IOException, ClassNotFoundException, CacheLoaderException {
+ store(InternalEntryFactory.create("k1", "v1"));
+ store(InternalEntryFactory.create("k2", "v2"));
+ store(InternalEntryFactory.create("k3", "v3"));
- public void testConcurrency() throws Throwable {
- int numThreads = 3;
- final int loops = 500;
- final String[] keys = new String[10];
- final String[] values = new String[10];
- for (int i = 0; i < 10; i++) keys[i] = "k" + i;
- for (int i = 0; i < 10; i++) values[i] = "v" + i;
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ byte[] dummyStartBytes = {1, 2, 3, 4, 5, 6, 7, 8};
+ byte[] dummyEndBytes = {8, 7, 6, 5, 4, 3, 2, 1};
+ out.write(dummyStartBytes);
+ toStream(out);
+ out.write(dummyEndBytes);
+ out.close();
+ cacheMap.clear();
+ // first pop the start bytes
+ byte[] dummy = new byte[8];
+ ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
+ int bytesRead = in.read(dummy, 0, 8);
+ assert bytesRead == 8;
+ for (int i = 1; i < 9; i++) assert dummy[i - 1] == i : "Start byte stream corrupted!";
+ fromStream(in);
+ bytesRead = in.read(dummy, 0, 8);
+ assert bytesRead == 8;
+ for (int i = 8; i > 0; i--) assert dummy[8 - i] == i : "Start byte stream corrupted!";
- final Random r = new Random();
- final List<Throwable> throwables = new LinkedList<Throwable>();
+ Set<InternalCacheEntry> set = loadAll();
- final Runnable store = new Runnable() {
- public void run() {
- try {
- int randomInt = r.nextInt(10);
- store(InternalEntryFactory.create(keys[randomInt], values[randomInt]));
- } catch (Throwable e) {
- throwables.add(e);
- }
- }
- };
+ assert set.size() == 3;
+ Set expected = new HashSet();
+ expected.add("k1");
+ expected.add("k2");
+ expected.add("k3");
+ for (InternalCacheEntry se : set) assert expected.remove(se.getKey());
+ assert expected.isEmpty();
+ }
- final Runnable remove = new Runnable() {
- public void run() {
- try {
- cacheMap.remove(keys[r.nextInt(10)]);
- } catch (Throwable e) {
- throwables.add(e);
+ @Test(enabled=false)
+ public void testConcurrency() throws Throwable {
+ int numThreads = 3;
+ final int loops = 500;
+ final String[] keys = new String[10];
+ final String[] values = new String[10];
+ for (int i = 0; i < 10; i++) keys[i] = "k" + i;
+ for (int i = 0; i < 10; i++) values[i] = "v" + i;
+
+
+ final Random r = new Random();
+ final List<Throwable> throwables = new LinkedList<Throwable>();
+
+ final Runnable store = new Runnable() {
+ public void run() {
+ try {
+ int randomInt = r.nextInt(10);
+ store(InternalEntryFactory.create(keys[randomInt], values[randomInt]));
+ } catch (Throwable e) {
+ throwables.add(e);
+ }
}
- }
- };
+ };
- final Runnable get = new Runnable() {
- public void run() {
- try {
- int randomInt = r.nextInt(10);
- InternalCacheEntry se = load(keys[randomInt]);
- assert se == null || se.getValue().equals(values[randomInt]);
- loadAll();
- } catch (Throwable e) {
- throwables.add(e);
+ final Runnable remove = new Runnable() {
+ public void run() {
+ try {
+ cacheMap.remove(keys[r.nextInt(10)]);
+ } catch (Throwable e) {
+ throwables.add(e);
+ }
}
- }
- };
+ };
- Thread[] threads = new Thread[numThreads];
-
- for (int i = 0; i < numThreads; i++) {
- threads[i] = new Thread(getClass().getSimpleName() + "-" + i) {
+ final Runnable get = new Runnable() {
public void run() {
- for (int i = 0; i < loops; i++) {
- store.run();
- remove.run();
- get.run();
- }
+ try {
+ int randomInt = r.nextInt(10);
+ InternalCacheEntry se = load(keys[randomInt]);
+ assert se == null || se.getValue().equals(values[randomInt]);
+ loadAll();
+ } catch (Throwable e) {
+ throwables.add(e);
+ }
}
- };
- }
+ };
- for (Thread t : threads) t.start();
- for (Thread t : threads) t.join();
+ Thread[] threads = new Thread[numThreads];
- if (!throwables.isEmpty()) throw throwables.get(0);
- }
+ for (int i = 0; i < numThreads; i++) {
+ threads[i] = new Thread(getClass().getSimpleName() + "-" + i) {
+ public void run() {
+ for (int i = 0; i < loops; i++) {
+ store.run();
+ remove.run();
+ get.run();
+ }
+ }
+ };
+ }
+ for (Thread t : threads) t.start();
+ for (Thread t : threads) t.join();
+ if (!throwables.isEmpty()) throw throwables.get(0);
+ }
+
+
}
Modified: trunk/core/src/main/java/org/infinispan/loader/file/FileCacheStore.java
===================================================================
--- trunk/core/src/main/java/org/infinispan/loader/file/FileCacheStore.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/core/src/main/java/org/infinispan/loader/file/FileCacheStore.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -1,8 +1,8 @@
package org.infinispan.loader.file;
import org.infinispan.Cache;
+import org.infinispan.config.ConfigurationException;
import org.infinispan.container.entries.InternalCacheEntry;
-import org.infinispan.config.ConfigurationException;
import org.infinispan.loader.CacheLoaderConfig;
import org.infinispan.loader.CacheLoaderException;
import org.infinispan.loader.bucket.Bucket;
@@ -25,186 +25,193 @@
*/
public class FileCacheStore extends BucketBasedCacheStore {
- private static final Log log = LogFactory.getLog(FileCacheStore.class);
- private int streamBufferSize;
+ private static final Log log = LogFactory.getLog(FileCacheStore.class);
+ private int streamBufferSize;
- FileCacheStoreConfig config;
- Cache cache;
- File root;
- /**
- * @return root directory where all files for this {@link org.infinispan.loader.CacheStore CacheStore} are written.
- */
- public File getRoot() {
- return root;
- }
+ FileCacheStoreConfig config;
+ Cache cache;
+ File root;
- public void init(CacheLoaderConfig config, Cache cache, Marshaller m) {
- super.init(config, cache, m);
- this.config = (FileCacheStoreConfig) config;
- this.cache = cache;
- }
+ /**
+ * @return root directory where all files for this {@link org.infinispan.loader.CacheStore CacheStore} are written.
+ */
+ public File getRoot() {
+ return root;
+ }
- protected Set<InternalCacheEntry> loadAllLockSafe() throws CacheLoaderException {
- Set<InternalCacheEntry> result = new HashSet<InternalCacheEntry>();
- for (File bucketFile : root.listFiles()) {
- Bucket bucket = loadBucket(bucketFile);
- if (bucket != null) {
- if (bucket.removeExpiredEntries()) {
- saveBucket(bucket);
+ public void init(CacheLoaderConfig config, Cache cache, Marshaller m) {
+ super.init(config, cache, m);
+ this.config = (FileCacheStoreConfig) config;
+ this.cache = cache;
+ }
+
+ protected Set<InternalCacheEntry> loadAllLockSafe() throws CacheLoaderException {
+ Set<InternalCacheEntry> result = new HashSet<InternalCacheEntry>();
+ for (File bucketFile : root.listFiles()) {
+ Bucket bucket = loadBucket(bucketFile);
+ if (bucket != null) {
+ if (bucket.removeExpiredEntries()) {
+ saveBucket(bucket);
+ }
+ result.addAll(bucket.getStoredEntries());
}
- result.addAll(bucket.getStoredEntries());
- }
- }
- return result;
- }
+ }
+ return result;
+ }
- protected void fromStreamLockSafe(ObjectInput objectInput) throws CacheLoaderException {
- try {
- int numFiles = objectInput.readInt();
- byte[] buffer = new byte[streamBufferSize];
- int bytesRead, totalBytesRead = 0;
- for (int i = 0; i < numFiles; i++) {
- String fName = (String) objectInput.readObject();
- int numBytes = objectInput.readInt();
- FileOutputStream fos = new FileOutputStream(root.getAbsolutePath() + File.separator + fName);
- BufferedOutputStream bos = new BufferedOutputStream(fos, streamBufferSize);
+ protected void fromStreamLockSafe(ObjectInput objectInput) throws CacheLoaderException {
+ try {
+ int numFiles = objectInput.readInt();
+ byte[] buffer = new byte[streamBufferSize];
+ int bytesRead, totalBytesRead = 0;
+ for (int i = 0; i < numFiles; i++) {
+ String fName = (String) objectInput.readObject();
+ int numBytes = objectInput.readInt();
+ FileOutputStream fos = new FileOutputStream(root.getAbsolutePath() + File.separator + fName);
+ BufferedOutputStream bos = new BufferedOutputStream(fos, streamBufferSize);
- while (numBytes > totalBytesRead) {
- bytesRead = objectInput.read(buffer, 0, streamBufferSize);
- if (bytesRead == -1) break;
- totalBytesRead += bytesRead;
- bos.write(buffer, 0, bytesRead);
+ while (numBytes > totalBytesRead) {
+ bytesRead = objectInput.read(buffer, 0, streamBufferSize);
+ if (bytesRead == -1) break;
+ totalBytesRead += bytesRead;
+ bos.write(buffer, 0, bytesRead);
+ }
+ bos.flush();
+ safeClose(bos);
+ fos.flush();
+ safeClose(fos);
+ totalBytesRead = 0;
}
- bos.flush();
- safeClose(bos);
- fos.flush();
- safeClose(fos);
- totalBytesRead = 0;
- }
- } catch (IOException e) {
- throw new CacheLoaderException("I/O error", e);
- } catch (ClassNotFoundException e) {
- throw new CacheLoaderException("Unexpected expcetion", e);
- }
- }
+ } catch (IOException e) {
+ throw new CacheLoaderException("I/O error", e);
+ } catch (ClassNotFoundException e) {
+ throw new CacheLoaderException("Unexpected expcetion", e);
+ }
+ }
- protected void toStreamLockSafe(ObjectOutput objectOutput) throws CacheLoaderException {
- try {
- File[] files = root.listFiles();
- objectOutput.writeInt(files.length);
- byte[] buffer = new byte[streamBufferSize];
- for (File file : files) {
- int bytesRead, totalBytesRead = 0;
- FileInputStream fileInStream = new FileInputStream(file);
- int sz = fileInStream.available();
- BufferedInputStream bis = new BufferedInputStream(fileInStream);
- objectOutput.writeObject(file.getName());
- objectOutput.writeInt(sz);
+ protected void toStreamLockSafe(ObjectOutput objectOutput) throws CacheLoaderException {
+ try {
+ File[] files = root.listFiles();
+ objectOutput.writeInt(files.length);
+ byte[] buffer = new byte[streamBufferSize];
+ for (File file : files) {
+ int bytesRead, totalBytesRead = 0;
+ FileInputStream fileInStream = new FileInputStream(file);
+ int sz = fileInStream.available();
+ BufferedInputStream bis = new BufferedInputStream(fileInStream);
+ objectOutput.writeObject(file.getName());
+ objectOutput.writeInt(sz);
- while (sz > totalBytesRead) {
- bytesRead = bis.read(buffer, 0, streamBufferSize);
- if (bytesRead == -1) break;
- totalBytesRead += bytesRead;
- objectOutput.write(buffer, 0, bytesRead);
+ while (sz > totalBytesRead) {
+ bytesRead = bis.read(buffer, 0, streamBufferSize);
+ if (bytesRead == -1) break;
+ totalBytesRead += bytesRead;
+ objectOutput.write(buffer, 0, bytesRead);
+ }
+ bis.close();
+ fileInStream.close();
}
- bis.close();
- fileInStream.close();
- }
- } catch (IOException e) {
- throw new CacheLoaderException("I/O expcetion while generating stream", e);
- }
- }
+ } catch (IOException e) {
+ throw new CacheLoaderException("I/O expcetion while generating stream", e);
+ }
+ }
- protected void clearLockSafe() throws CacheLoaderException {
- for (File f : root.listFiles()) {
- if (!f.delete()) log.warn("Had problems removing file {0}", f);
- }
- }
+ protected void clearLockSafe() throws CacheLoaderException {
+ File[] toDelete = root.listFiles();
+ if (toDelete == null) {
+ return;
+ }
+ for (File f : toDelete) {
+ f.delete();
+ if (f.exists()) log.warn("Had problems removing file {0}", f);
+ }
+ }
- protected void purgeInternal() throws CacheLoaderException {
- loadAll();
- }
+ protected void purgeInternal() throws CacheLoaderException {
+ loadAll();
+ }
- protected Bucket loadBucket(String bucketName) throws CacheLoaderException {
- return loadBucket(new File(root, bucketName));
- }
+ protected Bucket loadBucket(String bucketName) throws CacheLoaderException {
+ return loadBucket(new File(root, bucketName));
+ }
- protected Bucket loadBucket(File bucketFile) throws CacheLoaderException {
- Bucket bucket = null;
- if (bucketFile.exists()) {
- if (log.isTraceEnabled()) log.trace("Found bucket file: '" + bucketFile + "'");
- FileInputStream is = null;
- ObjectInputStream ois = null;
- try {
- is = new FileInputStream(bucketFile);
- ois = new ObjectInputStream(is);
- bucket = (Bucket) ois.readObject();
- } catch (Exception e) {
- String message = "Error while reading from file: " + bucketFile.getAbsoluteFile();
- log.error(message, e);
- throw new CacheLoaderException(message, e);
- } finally {
- safeClose(ois);
- safeClose(is);
- }
- }
- if (bucket != null) {
- bucket.setBucketName(bucketFile.getName());
- }
- return bucket;
- }
+ protected Bucket loadBucket(File bucketFile) throws CacheLoaderException {
+ Bucket bucket = null;
+ if (bucketFile.exists()) {
+ if (log.isTraceEnabled()) log.trace("Found bucket file: '" + bucketFile + "'");
+ FileInputStream is = null;
+ ObjectInputStream ois = null;
+ try {
+ is = new FileInputStream(bucketFile);
+ ois = new ObjectInputStream(is);
+ bucket = (Bucket) ois.readObject();
+ } catch (Exception e) {
+ String message = "Error while reading from file: " + bucketFile.getAbsoluteFile();
+ log.error(message, e);
+ throw new CacheLoaderException(message, e);
+ } finally {
+ safeClose(ois);
+ safeClose(is);
+ }
+ }
+ if (bucket != null) {
+ bucket.setBucketName(bucketFile.getName());
+ }
+ return bucket;
+ }
- protected void insertBucket(Bucket bucket) throws CacheLoaderException {
- saveBucket(bucket);
- }
+ protected void insertBucket(Bucket bucket) throws CacheLoaderException {
+ saveBucket(bucket);
+ }
- public void saveBucket(Bucket b) throws CacheLoaderException {
- File f = new File(root, b.getBucketName());
- if (f.exists()) {
- if (!f.delete()) log.warn("Had problems removing file {0}", f);
- } else if (log.isTraceEnabled()) {
- log.trace("Successfully deleted file: '" + f.getName() + "'");
- }
+ public void saveBucket(Bucket b) throws CacheLoaderException {
+ File f = new File(root, b.getBucketName());
+ if (f.exists()) {
+ if (!f.delete()) log.warn("Had problems removing file {0}", f);
+ } else if (log.isTraceEnabled()) {
+ log.trace("Successfully deleted file: '" + f.getName() + "'");
+ }
- if (!b.getEntries().isEmpty()) {
- FileOutputStream fos = null;
- ObjectOutputStream oos = null;
- try {
- fos = new FileOutputStream(f);
- oos = new ObjectOutputStream(fos);
- oos.writeObject(b);
- oos.flush();
- fos.flush();
- } catch (IOException ex) {
- log.error("Exception while saving bucket " + b, ex);
- throw new CacheLoaderException(ex);
- }
- finally {
- safeClose(oos);
- safeClose(fos);
- }
- }
- }
+ if (!b.getEntries().isEmpty()) {
+ FileOutputStream fos = null;
+ ObjectOutputStream oos = null;
+ try {
+ fos = new FileOutputStream(f);
+ oos = new ObjectOutputStream(fos);
+ oos.writeObject(b);
+ oos.flush();
+ fos.flush();
+ } catch (IOException ex) {
+ log.error("Exception while saving bucket " + b, ex);
+ throw new CacheLoaderException(ex);
+ }
+ finally {
+ safeClose(oos);
+ safeClose(fos);
+ }
+ }
+ }
- public Class<? extends CacheLoaderConfig> getConfigurationClass() {
- return FileCacheStoreConfig.class;
- }
+ public Class<? extends CacheLoaderConfig> getConfigurationClass() {
+ return FileCacheStoreConfig.class;
+ }
- public void start() throws CacheLoaderException {
- super.start();
- String location = config.getLocation();
- if (location == null || location.trim().length() == 0) location = "Infinispan-FileCacheStore"; // use relative path!
- location += File.separator + cache.getName();
- root = new File(location);
- if (!root.exists()) {
- if (!root.mkdirs())
+ public void start() throws CacheLoaderException {
+ super.start();
+ String location = config.getLocation();
+ if (location == null || location.trim().length() == 0)
+ location = "Infinispan-FileCacheStore"; // use relative path!
+ location += File.separator + cache.getName();
+ root = new File(location);
+ root.mkdirs();
+ if (!root.exists()) {
throw new ConfigurationException("Directory " + root.getAbsolutePath() + " does not exist and cannot be created!");
- }
- streamBufferSize = config.getStreamBufferSize();
- }
+ }
+ streamBufferSize = config.getStreamBufferSize();
+ }
- public Bucket loadBucketContainingKey(String key) throws CacheLoaderException {
- return loadBucket(key.hashCode() + "");
- }
+ public Bucket loadBucketContainingKey(String key) throws CacheLoaderException {
+ return loadBucket(key.hashCode() + "");
+ }
}
Modified: trunk/core/src/test/java/org/infinispan/loader/file/FileCacheStoreTest.java
===================================================================
--- trunk/core/src/test/java/org/infinispan/loader/file/FileCacheStoreTest.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/core/src/test/java/org/infinispan/loader/file/FileCacheStoreTest.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -8,88 +8,97 @@
import org.infinispan.loader.CacheStore;
import org.infinispan.loader.bucket.Bucket;
import org.infinispan.test.TestingUtil;
-import org.testng.annotations.Test;
+import org.testng.annotations.*;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
+import java.io.*;
@Test(groups = "unit", testName = "loader.file.FileCacheStoreTest")
public class FileCacheStoreTest extends BaseCacheStoreTest {
- private final String tmpDirectory = TestingUtil.TEST_FILES + File.separator + getClass().getSimpleName();
- private FileCacheStore fcs;
+ private FileCacheStore fcs;
+ private String tmpDirectory;
- protected CacheStore createCacheStore() throws CacheLoaderException {
- fcs = new FileCacheStore();
- FileCacheStoreConfig cfg = new FileCacheStoreConfig();
- cfg.setLocation(tmpDirectory);
- cfg.setPurgeSynchronously(true); // for more accurate unit testing
- fcs.init(cfg, getCache(), getMarshaller());
- fcs.start();
- return fcs;
- }
+ @BeforeTest
+ @Parameters({"basedir"})
+ protected void setUpTempDir(String basedir) {
+ tmpDirectory = basedir + TestingUtil.TEST_PATH + File.separator + getClass().getSimpleName();
+ }
- @Override
- public void testPreload() throws CacheLoaderException {
- super.testPreload();
- }
+ @AfterTest
+ protected void clearTempDir() {
+ TestingUtil.recursiveFileRemove(tmpDirectory);
+ new File(tmpDirectory).mkdirs();
+ }
- @Override
- public void testPurgeExpired() throws Exception {
- long lifespan = 1000;
- cs.store(InternalEntryFactory.create("k1", "v1", lifespan));
- cs.store(InternalEntryFactory.create("k2", "v2", lifespan));
- cs.store(InternalEntryFactory.create("k3", "v3", lifespan));
- assert cs.containsKey("k1");
- assert cs.containsKey("k2");
- assert cs.containsKey("k3");
- Thread.sleep(lifespan + 100);
- cs.purgeExpired();
- FileCacheStore fcs = (FileCacheStore) cs;
- assert fcs.load("k1") == null;
- assert fcs.load("k2") == null;
- assert fcs.load("k3") == null;
- }
+ protected CacheStore createCacheStore() throws CacheLoaderException {
+ clearTempDir();
+ fcs = new FileCacheStore();
+ FileCacheStoreConfig cfg = new FileCacheStoreConfig();
+ cfg.setLocation(tmpDirectory);
+ cfg.setPurgeSynchronously(true); // for more accurate unit testing
+ fcs.init(cfg, getCache(), getMarshaller());
+ fcs.start();
+ return fcs;
+ }
- public void testBucketRemoval() throws Exception {
- Bucket b;
- InternalCacheEntry se = InternalEntryFactory.create("test", "value");
- fcs.store(se);
- b = fcs.loadBucketContainingKey("test");
- assert b != null;
+ @Override
+ public void testPreload() throws CacheLoaderException {
+ super.testPreload();
+ }
- assert !b.getEntries().isEmpty();
+ @Override
+ public void testPurgeExpired() throws Exception {
+ long lifespan = 1000;
+ cs.store(InternalEntryFactory.create("k1", "v1", lifespan));
+ cs.store(InternalEntryFactory.create("k2", "v2", lifespan));
+ cs.store(InternalEntryFactory.create("k3", "v3", lifespan));
+ assert cs.containsKey("k1");
+ assert cs.containsKey("k2");
+ assert cs.containsKey("k3");
+ Thread.sleep(lifespan + 100);
+ cs.purgeExpired();
+ FileCacheStore fcs = (FileCacheStore) cs;
+ assert fcs.load("k1") == null;
+ assert fcs.load("k2") == null;
+ assert fcs.load("k3") == null;
+ }
- assert new File(fcs.root, b.getBucketName()).exists();
+ public void testBucketRemoval() throws Exception {
+ Bucket b;
+ InternalCacheEntry se = InternalEntryFactory.create("test", "value");
+ fcs.store(se);
+ b = fcs.loadBucketContainingKey("test");
+ assert b != null;
- b.removeEntry("test");
- assert b.getEntries().isEmpty();
+ assert !b.getEntries().isEmpty();
- fcs.saveBucket(b);
- assert !new File(fcs.root, b.getBucketName()).exists();
- }
+ assert new File(fcs.root, b.getBucketName()).exists();
- public void testToStream() throws Exception {
- cs.store(InternalEntryFactory.create("k1", "v1", -1, -1));
+ b.removeEntry("test");
+ assert b.getEntries().isEmpty();
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- ObjectOutputStream oos = new ObjectOutputStream(out);
- cs.toStream(new UnclosableObjectOutputStream(oos));
- oos.flush();
- oos.close();
- out.close();
+ fcs.saveBucket(b);
+ assert !new File(fcs.root, b.getBucketName()).exists();
+ }
- ObjectInputStream ois = null;
- try {
- ois = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray()));
- assert ois.readInt() == 1 : "we have 3 different buckets";
- assert ois.readObject().equals("k1".hashCode() + "");
- assert ois.readInt() > 0; //size on disk
- } finally {
- if (ois != null) ois.close();
- }
- }
+ public void testToStream() throws Exception {
+ cs.store(InternalEntryFactory.create("k1", "v1", -1, -1));
+
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(out);
+ cs.toStream(new UnclosableObjectOutputStream(oos));
+ oos.flush();
+ oos.close();
+ out.close();
+
+ ObjectInputStream ois = null;
+ try {
+ ois = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray()));
+ assert ois.readInt() == 1 : "we have 3 different buckets";
+ assert ois.readObject().equals("k1".hashCode() + "");
+ assert ois.readInt() > 0; //size on disk
+ } finally {
+ if (ois != null) ois.close();
+ }
+ }
}
Modified: trunk/core/src/test/java/org/infinispan/test/TestingUtil.java
===================================================================
--- trunk/core/src/test/java/org/infinispan/test/TestingUtil.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/core/src/test/java/org/infinispan/test/TestingUtil.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -32,129 +32,129 @@
import java.util.Random;
public class TestingUtil {
- private static Random random = new Random();
- public static final String TEST_FILES = "testFiles";
+ private static Random random = new Random();
+ public static final String TEST_PATH = "target" + File.separator + "tempFiles";
- /**
- * Extracts the value of a field in a given target instance using reflection, able to extract private fields as
- * well.
- *
- * @param target object to extract field from
- * @param fieldName name of field to extract
- * @return field value
- */
- public static Object extractField(Object target, String fieldName) {
- return extractField(target.getClass(), target, fieldName);
- }
+ /**
+ * Extracts the value of a field in a given target instance using reflection, able to extract private fields as
+ * well.
+ *
+ * @param target object to extract field from
+ * @param fieldName name of field to extract
+ * @return field value
+ */
+ public static Object extractField(Object target, String fieldName) {
+ return extractField(target.getClass(), target, fieldName);
+ }
- public static void replaceField(Object newValue, String fieldName, Object owner, Class baseType) {
- Field field;
- try {
- field = baseType.getDeclaredField(fieldName);
- field.setAccessible(true);
- field.set(owner, newValue);
- }
- catch (Exception e) {
- throw new RuntimeException(e);//just to simplify exception handeling
- }
- }
+ public static void replaceField(Object newValue, String fieldName, Object owner, Class baseType) {
+ Field field;
+ try {
+ field = baseType.getDeclaredField(fieldName);
+ field.setAccessible(true);
+ field.set(owner, newValue);
+ }
+ catch (Exception e) {
+         throw new RuntimeException(e);//just to simplify exception handling
+ }
+ }
- public static Object extractField(Class type, Object target, String fieldName) {
- Field field;
- try {
- field = type.getDeclaredField(fieldName);
- field.setAccessible(true);
- return field.get(target);
- }
- catch (Exception e) {
- if (type.equals(Object.class)) {
- e.printStackTrace();
- return null;
- } else {
- // try with superclass!!
- return extractField(type.getSuperclass(), target, fieldName);
- }
- }
- }
+ public static Object extractField(Class type, Object target, String fieldName) {
+ Field field;
+ try {
+ field = type.getDeclaredField(fieldName);
+ field.setAccessible(true);
+ return field.get(target);
+ }
+ catch (Exception e) {
+ if (type.equals(Object.class)) {
+ e.printStackTrace();
+ return null;
+ } else {
+ // try with superclass!!
+ return extractField(type.getSuperclass(), target, fieldName);
+ }
+ }
+ }
- public static <T extends CommandInterceptor> T findInterceptor(Cache<?, ?> cache, Class<T> interceptorToFind) {
- for (CommandInterceptor i : cache.getAdvancedCache().getInterceptorChain()) {
- if (interceptorToFind.isInstance(i)) return interceptorToFind.cast(i);
- }
- return null;
- }
+ public static <T extends CommandInterceptor> T findInterceptor(Cache<?, ?> cache, Class<T> interceptorToFind) {
+ for (CommandInterceptor i : cache.getAdvancedCache().getInterceptorChain()) {
+ if (interceptorToFind.isInstance(i)) return interceptorToFind.cast(i);
+ }
+ return null;
+ }
- /**
- * Loops, continually calling {@link #areCacheViewsComplete(Cache[])} until it either returns true or
- * <code>timeout</code> ms have elapsed.
- *
- * @param caches caches which must all have consistent views
- * @param timeout max number of ms to loop
- * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
- * members.
- */
- public static void blockUntilViewsReceived(Cache[] caches, long timeout) {
- long failTime = System.currentTimeMillis() + timeout;
+ /**
+ * Loops, continually calling {@link #areCacheViewsComplete(Cache[])} until it either returns true or
+ * <code>timeout</code> ms have elapsed.
+ *
+ * @param caches caches which must all have consistent views
+ * @param timeout max number of ms to loop
+ * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
+ * members.
+ */
+ public static void blockUntilViewsReceived(Cache[] caches, long timeout) {
+ long failTime = System.currentTimeMillis() + timeout;
- while (System.currentTimeMillis() < failTime) {
- sleepThread(100);
- if (areCacheViewsComplete(caches)) {
- return;
- }
- }
+ while (System.currentTimeMillis() < failTime) {
+ sleepThread(100);
+ if (areCacheViewsComplete(caches)) {
+ return;
+ }
+ }
- throw new RuntimeException("timed out before caches had complete views");
- }
+ throw new RuntimeException("timed out before caches had complete views");
+ }
- /**
- * Version of blockUntilViewsReceived that uses varargs
- */
- public static void blockUntilViewsReceived(long timeout, Cache... caches) {
- blockUntilViewsReceived(caches, timeout);
- }
+ /**
+ * Version of blockUntilViewsReceived that uses varargs
+ */
+ public static void blockUntilViewsReceived(long timeout, Cache... caches) {
+ blockUntilViewsReceived(caches, timeout);
+ }
- /**
- * Version of blockUntilViewsReceived that uses varargsa and cache managers
- */
- public static void blockUntilViewsReceived(long timeout, CacheManager... cacheManagers) {
- blockUntilViewsReceived(timeout, true, cacheManagers);
- }
+ /**
+    * Version of blockUntilViewsReceived that uses varargs and cache managers
+ */
+ public static void blockUntilViewsReceived(long timeout, CacheManager... cacheManagers) {
+ blockUntilViewsReceived(timeout, true, cacheManagers);
+ }
- /**
- * Waits for the given memebrs to be removed from the cluster. The difference between this and {@link
- * #blockUntilViewsReceived(long, org.infinispan.manager.CacheManager[])} methods(s) is that it does not barf if more
- * than expected memebers is in the cluster - this is because we expect to start with a grater number fo memebers
- * than we eventually expect. It will barf though, if the number of members is not the one expected but only after
- * the timeout expieres.
- */
- public static void blockForMemberToFail(long timeout, CacheManager... cacheManagers) {
- blockUntilViewsReceived(timeout, false, cacheManagers);
- areCacheViewsComplete(true, cacheManagers);
- }
+ /**
+    * Waits for the given members to be removed from the cluster. The difference between this and the {@link
+    * #blockUntilViewsReceived(long, org.infinispan.manager.CacheManager[])} method(s) is that it does not barf if more
+    * than the expected number of members is in the cluster - this is because we expect to start with a greater number
+    * of members than we eventually expect. It will barf, though, if the number of members is not the one expected,
+    * but only after the timeout expires.
+ */
+ public static void blockForMemberToFail(long timeout, CacheManager... cacheManagers) {
+ blockUntilViewsReceived(timeout, false, cacheManagers);
+ areCacheViewsComplete(true, cacheManagers);
+ }
- public static void blockUntilViewsReceived(long timeout, boolean barfIfTooManyMembers, CacheManager... cacheManagers) {
- long failTime = System.currentTimeMillis() + timeout;
+ public static void blockUntilViewsReceived(long timeout, boolean barfIfTooManyMembers, CacheManager... cacheManagers) {
+ long failTime = System.currentTimeMillis() + timeout;
- while (System.currentTimeMillis() < failTime) {
- sleepThread(100);
- if (areCacheViewsComplete(barfIfTooManyMembers, cacheManagers)) {
- return;
- }
- }
+ while (System.currentTimeMillis() < failTime) {
+ sleepThread(100);
+ if (areCacheViewsComplete(barfIfTooManyMembers, cacheManagers)) {
+ return;
+ }
+ }
- throw new RuntimeException("timed out before caches had complete views");
- }
+ throw new RuntimeException("timed out before caches had complete views");
+ }
- /**
- * Loops, continually calling {@link #areCacheViewsComplete(CacheSPI[])} until it either returns true or
- * <code>timeout</code> ms have elapsed.
- *
- * @param caches caches which must all have consistent views
- * @param timeout max number of ms to loop
- * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
- * members.
- */
+ /**
+ * Loops, continually calling {@link #areCacheViewsComplete(CacheSPI[])} until it either returns true or
+ * <code>timeout</code> ms have elapsed.
+ *
+ * @param caches caches which must all have consistent views
+ * @param timeout max number of ms to loop
+ * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
+ * members.
+ */
// public static void blockUntilViewsReceived(Cache[] caches, long timeout) {
// long failTime = System.currentTimeMillis() + timeout;
//
@@ -169,91 +169,91 @@
// }
- /**
- * An overloaded version of {@link #blockUntilViewsReceived(long,Cache[])} that allows for 'shrinking' clusters.
- * I.e., the usual method barfs if there are more members than expected. This one takes a param
- * (barfIfTooManyMembers) which, if false, will NOT barf but will wait until the cluster 'shrinks' to the desired
- * size. Useful if in tests, you kill a member and want to wait until this fact is known across the cluster.
- *
- * @param timeout
- * @param barfIfTooManyMembers
- * @param caches
- */
- public static void blockUntilViewsReceived(long timeout, boolean barfIfTooManyMembers, Cache... caches) {
- long failTime = System.currentTimeMillis() + timeout;
+ /**
+ * An overloaded version of {@link #blockUntilViewsReceived(long,Cache[])} that allows for 'shrinking' clusters.
+ * I.e., the usual method barfs if there are more members than expected. This one takes a param
+ * (barfIfTooManyMembers) which, if false, will NOT barf but will wait until the cluster 'shrinks' to the desired
+ * size. Useful if in tests, you kill a member and want to wait until this fact is known across the cluster.
+ *
+ * @param timeout
+ * @param barfIfTooManyMembers
+ * @param caches
+ */
+ public static void blockUntilViewsReceived(long timeout, boolean barfIfTooManyMembers, Cache... caches) {
+ long failTime = System.currentTimeMillis() + timeout;
- while (System.currentTimeMillis() < failTime) {
- sleepThread(100);
- if (areCacheViewsComplete(caches, barfIfTooManyMembers)) {
- return;
- }
- }
+ while (System.currentTimeMillis() < failTime) {
+ sleepThread(100);
+ if (areCacheViewsComplete(caches, barfIfTooManyMembers)) {
+ return;
+ }
+ }
- throw new RuntimeException("timed out before caches had complete views");
- }
+ throw new RuntimeException("timed out before caches had complete views");
+ }
- /**
- * Loops, continually calling {@link #areCacheViewsComplete(Cache[])} until it either returns true or
- * <code>timeout</code> ms have elapsed.
- *
- * @param groupSize number of caches expected in the group
- * @param timeout max number of ms to loop
- * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
- * members.
- */
- public static void blockUntilViewReceived(Cache cache, int groupSize, long timeout) {
- blockUntilViewReceived(cache, groupSize, timeout, true);
- }
+ /**
+ * Loops, continually calling {@link #areCacheViewsComplete(Cache[])} until it either returns true or
+ * <code>timeout</code> ms have elapsed.
+ *
+ * @param groupSize number of caches expected in the group
+ * @param timeout max number of ms to loop
+ * @throws RuntimeException if <code>timeout</code> ms have elapse without all caches having the same number of
+ * members.
+ */
+ public static void blockUntilViewReceived(Cache cache, int groupSize, long timeout) {
+ blockUntilViewReceived(cache, groupSize, timeout, true);
+ }
- public static void blockUntilViewReceived(Cache cache, int groupSize, long timeout, boolean barfIfTooManyMembersInView) {
- long failTime = System.currentTimeMillis() + timeout;
+ public static void blockUntilViewReceived(Cache cache, int groupSize, long timeout, boolean barfIfTooManyMembersInView) {
+ long failTime = System.currentTimeMillis() + timeout;
- while (System.currentTimeMillis() < failTime) {
- sleepThread(100);
- if (isCacheViewComplete(cache.getCacheManager().getMembers(), cache.getCacheManager().getAddress(), groupSize, barfIfTooManyMembersInView)) {
- return;
- }
- }
+ while (System.currentTimeMillis() < failTime) {
+ sleepThread(100);
+ if (isCacheViewComplete(cache.getCacheManager().getMembers(), cache.getCacheManager().getAddress(), groupSize, barfIfTooManyMembersInView)) {
+ return;
+ }
+ }
- throw new RuntimeException("timed out before caches had complete views");
- }
+ throw new RuntimeException("timed out before caches had complete views");
+ }
- /**
- * Checks each cache to see if the number of elements in the array returned by {@link CacheManager#getMembers()}
- * matches the size of the <code>caches</code> parameter.
- *
- * @param caches caches that should form a View
- * @return <code>true</code> if all caches have <code>caches.length</code> members; false otherwise
- * @throws IllegalStateException if any of the caches have MORE view members than caches.length
- */
- public static boolean areCacheViewsComplete(Cache[] caches) {
- return areCacheViewsComplete(caches, true);
- }
+ /**
+ * Checks each cache to see if the number of elements in the array returned by {@link CacheManager#getMembers()}
+ * matches the size of the <code>caches</code> parameter.
+ *
+ * @param caches caches that should form a View
+ * @return <code>true</code> if all caches have <code>caches.length</code> members; false otherwise
+ * @throws IllegalStateException if any of the caches have MORE view members than caches.length
+ */
+ public static boolean areCacheViewsComplete(Cache[] caches) {
+ return areCacheViewsComplete(caches, true);
+ }
- public static boolean areCacheViewsComplete(Cache[] caches, boolean barfIfTooManyMembers) {
- int memberCount = caches.length;
+ public static boolean areCacheViewsComplete(Cache[] caches, boolean barfIfTooManyMembers) {
+ int memberCount = caches.length;
- for (int i = 0; i < memberCount; i++) {
- if (!isCacheViewComplete(caches[i].getCacheManager().getMembers(), caches[i].getCacheManager().getAddress(), memberCount, barfIfTooManyMembers)) {
- return false;
- }
- }
+ for (int i = 0; i < memberCount; i++) {
+ if (!isCacheViewComplete(caches[i].getCacheManager().getMembers(), caches[i].getCacheManager().getAddress(), memberCount, barfIfTooManyMembers)) {
+ return false;
+ }
+ }
- return true;
- }
+ return true;
+ }
- public static boolean areCacheViewsComplete(boolean barfIfTooManyMembers, CacheManager... cacheManagers) {
- if (cacheManagers == null) throw new NullPointerException("Cache Manager array is null");
- int memberCount = cacheManagers.length;
+ public static boolean areCacheViewsComplete(boolean barfIfTooManyMembers, CacheManager... cacheManagers) {
+ if (cacheManagers == null) throw new NullPointerException("Cache Manager array is null");
+ int memberCount = cacheManagers.length;
- for (int i = 0; i < memberCount; i++) {
- if (!isCacheViewComplete(cacheManagers[i].getMembers(), cacheManagers[i].getAddress(), memberCount, barfIfTooManyMembers)) {
- return false;
- }
- }
+ for (int i = 0; i < memberCount; i++) {
+ if (!isCacheViewComplete(cacheManagers[i].getMembers(), cacheManagers[i].getAddress(), memberCount, barfIfTooManyMembers)) {
+ return false;
+ }
+ }
- return true;
- }
+ return true;
+ }
// /**
// * @param cache
@@ -286,334 +286,334 @@
// return true;
// }
- /**
- * @param c
- * @param memberCount
- */
- public static boolean isCacheViewComplete(Cache c, int memberCount) {
- return isCacheViewComplete(c.getCacheManager().getMembers(), c.getCacheManager().getAddress(), memberCount, true);
- }
+ /**
+ * @param c
+ * @param memberCount
+ */
+ public static boolean isCacheViewComplete(Cache c, int memberCount) {
+ return isCacheViewComplete(c.getCacheManager().getMembers(), c.getCacheManager().getAddress(), memberCount, true);
+ }
- public static boolean isCacheViewComplete(List members, Address address, int memberCount, boolean barfIfTooManyMembers) {
- if (members == null || memberCount > members.size()) {
- return false;
- } else if (memberCount < members.size()) {
- if (barfIfTooManyMembers) {
- // This is an exceptional condition
- StringBuilder sb = new StringBuilder("Cache at address ");
- sb.append(address);
- sb.append(" had ");
- sb.append(members.size());
- sb.append(" members; expecting ");
- sb.append(memberCount);
- sb.append(". Members were (");
- for (int j = 0; j < members.size(); j++) {
- if (j > 0) {
- sb.append(", ");
- }
- sb.append(members.get(j));
- }
- sb.append(')');
+ public static boolean isCacheViewComplete(List members, Address address, int memberCount, boolean barfIfTooManyMembers) {
+ if (members == null || memberCount > members.size()) {
+ return false;
+ } else if (memberCount < members.size()) {
+ if (barfIfTooManyMembers) {
+ // This is an exceptional condition
+ StringBuilder sb = new StringBuilder("Cache at address ");
+ sb.append(address);
+ sb.append(" had ");
+ sb.append(members.size());
+ sb.append(" members; expecting ");
+ sb.append(memberCount);
+ sb.append(". Members were (");
+ for (int j = 0; j < members.size(); j++) {
+ if (j > 0) {
+ sb.append(", ");
+ }
+ sb.append(members.get(j));
+ }
+ sb.append(')');
- throw new IllegalStateException(sb.toString());
- } else return false;
- }
+ throw new IllegalStateException(sb.toString());
+ } else return false;
+ }
- return true;
- }
+ return true;
+ }
- /**
- * Puts the current thread to sleep for the desired number of ms, suppressing any exceptions.
- *
- * @param sleeptime number of ms to sleep
- */
- public static void sleepThread(long sleeptime) {
- try {
- Thread.sleep(sleeptime);
- }
- catch (InterruptedException ie) {
- }
- }
+ /**
+ * Puts the current thread to sleep for the desired number of ms, suppressing any exceptions.
+ *
+ * @param sleeptime number of ms to sleep
+ */
+ public static void sleepThread(long sleeptime) {
+ try {
+ Thread.sleep(sleeptime);
+ }
+ catch (InterruptedException ie) {
+ }
+ }
- public static void sleepRandom(int maxTime) {
- sleepThread(random.nextInt(maxTime));
- }
+ public static void sleepRandom(int maxTime) {
+ sleepThread(random.nextInt(maxTime));
+ }
- public static void recursiveFileRemove(String directoryName) {
- File file = new File(directoryName);
- recursiveFileRemove(file);
- }
+ public static void recursiveFileRemove(String directoryName) {
+ File file = new File(directoryName);
+ recursiveFileRemove(file);
+ }
- public static void recursiveFileRemove(File file) {
- if (file.exists()) {
- System.out.println("Deleting file " + file);
- recursivedelete(file);
- }
- }
-
- private static void recursivedelete(File f) {
- if (f.isDirectory()) {
- File[] files = f.listFiles();
- for (File file : files) {
+ public static void recursiveFileRemove(File file) {
+ if (file.exists()) {
+ System.out.println("Deleting file " + file);
recursivedelete(file);
- }
- }
- //System.out.println("File " + f.toURI() + " deleted = " + f.delete());
- f.delete();
- }
+ }
+ }
- public static void killCaches(Collection caches) {
- if (caches != null) killCaches((Cache[]) caches.toArray(new Cache[]{}));
- }
+ private static void recursivedelete(File f) {
+ if (f.isDirectory()) {
+ File[] files = f.listFiles();
+ for (File file : files) {
+ recursivedelete(file);
+ }
+ }
+ //System.out.println("File " + f.toURI() + " deleted = " + f.delete());
+ f.delete();
+ }
- public static void killCacheManagers(CacheManager... cacheManagers) {
- if (cacheManagers != null) {
- for (CacheManager cm : cacheManagers) {
- if (cm != null) cm.stop();
- }
- }
- }
+ public static void killCaches(Collection caches) {
+ if (caches != null) killCaches((Cache[]) caches.toArray(new Cache[]{}));
+ }
- public static void killCacheManagers(Collection<CacheManager> cacheManagers) {
- killCacheManagers(cacheManagers.toArray(new CacheManager[cacheManagers.size()]));
- }
+ public static void killCacheManagers(CacheManager... cacheManagers) {
+ if (cacheManagers != null) {
+ for (CacheManager cm : cacheManagers) {
+ if (cm != null) cm.stop();
+ }
+ }
+ }
- /**
- * Kills a cache - stops it, clears any data in any cache loaders, and rolls back any associated txs
- */
- public static void killCaches(Cache... caches) {
- for (Cache c : caches) {
- try {
- if (c != null && c.getStatus() == ComponentStatus.RUNNING) {
- TransactionManager tm = getTransactionManager(c);
- if (tm != null) {
- try {
- tm.rollback();
- }
- catch (Exception e) {
- // don't care
- }
- }
- c.stop();
+ public static void killCacheManagers(Collection<CacheManager> cacheManagers) {
+ killCacheManagers(cacheManagers.toArray(new CacheManager[cacheManagers.size()]));
+ }
+
+ /**
+ * Kills a cache - stops it, clears any data in any cache loaders, and rolls back any associated txs
+ */
+ public static void killCaches(Cache... caches) {
+ for (Cache c : caches) {
+ try {
+ if (c != null && c.getStatus() == ComponentStatus.RUNNING) {
+ TransactionManager tm = getTransactionManager(c);
+ if (tm != null) {
+ try {
+ tm.rollback();
+ }
+ catch (Exception e) {
+ // don't care
+ }
+ }
+ c.stop();
+ }
}
- }
- catch (Throwable t) {
+ catch (Throwable t) {
- }
- }
- }
+ }
+ }
+ }
- /**
- * Clears transaction with the current thread in the given transaction manager.
- *
- * @param txManager a TransactionManager to be cleared
- */
- public static void killTransaction(TransactionManager txManager) {
- if (txManager != null) {
- try {
- txManager.rollback();
- }
- catch (Exception e) {
- // don't care
- }
- }
- }
+ /**
+ * Clears transaction with the current thread in the given transaction manager.
+ *
+ * @param txManager a TransactionManager to be cleared
+ */
+ public static void killTransaction(TransactionManager txManager) {
+ if (txManager != null) {
+ try {
+ txManager.rollback();
+ }
+ catch (Exception e) {
+ // don't care
+ }
+ }
+ }
- /**
- * Clears any associated transactions with the current thread in the caches' transaction managers.
- */
- public static void killTransactions(Cache... caches) {
- for (Cache c : caches) {
- if (c != null && c.getStatus() == ComponentStatus.RUNNING) {
- TransactionManager tm = getTransactionManager(c);
- if (tm != null) {
- try {
- tm.rollback();
- }
- catch (Exception e) {
- // don't care
- }
+ /**
+ * Clears any associated transactions with the current thread in the caches' transaction managers.
+ */
+ public static void killTransactions(Cache... caches) {
+ for (Cache c : caches) {
+ if (c != null && c.getStatus() == ComponentStatus.RUNNING) {
+ TransactionManager tm = getTransactionManager(c);
+ if (tm != null) {
+ try {
+ tm.rollback();
+ }
+ catch (Exception e) {
+ // don't care
+ }
+ }
}
- }
- }
- }
+ }
+ }
- /**
- * For testing only - introspects a cache and extracts the ComponentRegistry
- *
- * @param cache cache to introspect
- * @return component registry
- */
- public static ComponentRegistry extractComponentRegistry(Cache cache) {
- return (ComponentRegistry) extractField(cache, "componentRegistry");
- }
+ /**
+ * For testing only - introspects a cache and extracts the ComponentRegistry
+ *
+ * @param cache cache to introspect
+ * @return component registry
+ */
+ public static ComponentRegistry extractComponentRegistry(Cache cache) {
+ return (ComponentRegistry) extractField(cache, "componentRegistry");
+ }
- public static GlobalComponentRegistry extractGlobalComponentRegistry(CacheManager cacheManager) {
- return (GlobalComponentRegistry) extractField(cacheManager, "globalComponentRegistry");
- }
+ public static GlobalComponentRegistry extractGlobalComponentRegistry(CacheManager cacheManager) {
+ return (GlobalComponentRegistry) extractField(cacheManager, "globalComponentRegistry");
+ }
- public static LockManager extractLockManager(Cache cache) {
- return extractComponentRegistry(cache).getComponent(LockManager.class);
- }
+ public static LockManager extractLockManager(Cache cache) {
+ return extractComponentRegistry(cache).getComponent(LockManager.class);
+ }
- /**
- * For testing only - introspects a cache and extracts the ComponentRegistry
- *
- * @param ci interceptor chain to introspect
- * @return component registry
- */
- public static ComponentRegistry extractComponentRegistry(InterceptorChain ci) {
- return (ComponentRegistry) extractField(ci, "componentRegistry");
- }
+ /**
+ * For testing only - introspects a cache and extracts the ComponentRegistry
+ *
+ * @param ci interceptor chain to introspect
+ * @return component registry
+ */
+ public static ComponentRegistry extractComponentRegistry(InterceptorChain ci) {
+ return (ComponentRegistry) extractField(ci, "componentRegistry");
+ }
- /**
- * Replaces the existing interceptor chain in the cache wih one represented by the interceptor passed in. This
- * utility updates dependencies on all components that rely on the interceptor chain as well.
- *
- * @param cache cache that needs to be altered
- * @param interceptor the first interceptor in the new chain.
- */
- public static void replaceInterceptorChain(Cache<?, ?> cache, CommandInterceptor interceptor) {
- ComponentRegistry cr = extractComponentRegistry(cache);
- // make sure all interceptors here are wired.
- CommandInterceptor i = interceptor;
- do {
- cr.wireDependencies(i);
- }
- while ((i = i.getNext()) != null);
+ /**
+    * Replaces the existing interceptor chain in the cache with one represented by the interceptor passed in. This
+ * utility updates dependencies on all components that rely on the interceptor chain as well.
+ *
+ * @param cache cache that needs to be altered
+ * @param interceptor the first interceptor in the new chain.
+ */
+ public static void replaceInterceptorChain(Cache<?, ?> cache, CommandInterceptor interceptor) {
+ ComponentRegistry cr = extractComponentRegistry(cache);
+ // make sure all interceptors here are wired.
+ CommandInterceptor i = interceptor;
+ do {
+ cr.wireDependencies(i);
+ }
+ while ((i = i.getNext()) != null);
- InterceptorChain inch = cr.getComponent(InterceptorChain.class);
- inch.setFirstInChain(interceptor);
- }
+ InterceptorChain inch = cr.getComponent(InterceptorChain.class);
+ inch.setFirstInChain(interceptor);
+ }
- /**
- * Retrieves the remote delegate for a given cache. It is on this remote delegate that the JGroups RPCDispatcher
- * invokes remote methods.
- *
- * @param cache cache instance for which a remote delegate is to be retrieved
- * @return remote delegate, or null if the cacge is not configured for replication.
- */
- public static CacheDelegate getInvocationDelegate(Cache cache) {
- return (CacheDelegate) cache;
- }
+ /**
+ * Retrieves the remote delegate for a given cache. It is on this remote delegate that the JGroups RPCDispatcher
+ * invokes remote methods.
+ *
+ * @param cache cache instance for which a remote delegate is to be retrieved
+    * @return remote delegate, or null if the cache is not configured for replication.
+ */
+ public static CacheDelegate getInvocationDelegate(Cache cache) {
+ return (CacheDelegate) cache;
+ }
- /**
- * Blocks until the cache has reached a specified state.
- *
- * @param cache cache to watch
- * @param cacheStatus status to wait for
- * @param timeout timeout to wait for
- */
- public static void blockUntilCacheStatusAchieved(Cache cache, ComponentStatus cacheStatus, long timeout) {
- AdvancedCache spi = cache.getAdvancedCache();
- long killTime = System.currentTimeMillis() + timeout;
- while (System.currentTimeMillis() < killTime) {
- if (spi.getStatus() == cacheStatus) return;
- sleepThread(50);
- }
- throw new RuntimeException("Timed out waiting for condition");
- }
+ /**
+ * Blocks until the cache has reached a specified state.
+ *
+ * @param cache cache to watch
+ * @param cacheStatus status to wait for
+ * @param timeout timeout to wait for
+ */
+ public static void blockUntilCacheStatusAchieved(Cache cache, ComponentStatus cacheStatus, long timeout) {
+ AdvancedCache spi = cache.getAdvancedCache();
+ long killTime = System.currentTimeMillis() + timeout;
+ while (System.currentTimeMillis() < killTime) {
+ if (spi.getStatus() == cacheStatus) return;
+ sleepThread(50);
+ }
+ throw new RuntimeException("Timed out waiting for condition");
+ }
- public static void replicateCommand(Cache cache, VisitableCommand command) throws Throwable {
- ComponentRegistry cr = extractComponentRegistry(cache);
- InterceptorChain ic = cr.getComponent(InterceptorChain.class);
- ic.invoke(command);
- }
+ public static void replicateCommand(Cache cache, VisitableCommand command) throws Throwable {
+ ComponentRegistry cr = extractComponentRegistry(cache);
+ InterceptorChain ic = cr.getComponent(InterceptorChain.class);
+ ic.invoke(command);
+ }
- public static void blockUntilViewsReceived(int timeout, List caches) {
- blockUntilViewsReceived((Cache[]) caches.toArray(new Cache[]{}), timeout);
- }
+ public static void blockUntilViewsReceived(int timeout, List caches) {
+ blockUntilViewsReceived((Cache[]) caches.toArray(new Cache[]{}), timeout);
+ }
- public static CommandsFactory extractCommandsFactory(Cache<Object, Object> cache) {
- return (CommandsFactory) extractField(cache, "commandsFactory");
- }
+ public static CommandsFactory extractCommandsFactory(Cache<Object, Object> cache) {
+ return (CommandsFactory) extractField(cache, "commandsFactory");
+ }
- public static void dumpCacheContents(List caches) {
- System.out.println("**** START: Cache Contents ****");
- int count = 1;
- for (Object o : caches) {
- Cache c = (Cache) o;
- if (c == null) {
- System.out.println(" ** Cache " + count + " is null!");
- } else {
- System.out.println(" ** Cache " + count + " is " + c.getCacheManager().getAddress());
- }
- count++;
- }
- System.out.println("**** END: Cache Contents ****");
- }
+ public static void dumpCacheContents(List caches) {
+ System.out.println("**** START: Cache Contents ****");
+ int count = 1;
+ for (Object o : caches) {
+ Cache c = (Cache) o;
+ if (c == null) {
+ System.out.println(" ** Cache " + count + " is null!");
+ } else {
+ System.out.println(" ** Cache " + count + " is " + c.getCacheManager().getAddress());
+ }
+ count++;
+ }
+ System.out.println("**** END: Cache Contents ****");
+ }
- public static void dumpCacheContents(Cache... caches) {
- dumpCacheContents(Arrays.asList(caches));
- }
+ public static void dumpCacheContents(Cache... caches) {
+ dumpCacheContents(Arrays.asList(caches));
+ }
- /**
- * Extracts a component of a given type from the cache's internal component registry
- */
- public static <T> T extractComponent(Cache cache, Class<T> componentType) {
- ComponentRegistry cr = extractComponentRegistry(cache);
- return cr.getComponent(componentType);
- }
+ /**
+ * Extracts a component of a given type from the cache's internal component registry
+ */
+ public static <T> T extractComponent(Cache cache, Class<T> componentType) {
+ ComponentRegistry cr = extractComponentRegistry(cache);
+ return cr.getComponent(componentType);
+ }
- /**
- * Extracts a component of a given type from the cache's internal component registry
- */
- public static <T> T extractGlobalComponent(CacheManager cacheManager, Class<T> componentType) {
- GlobalComponentRegistry gcr = extractGlobalComponentRegistry(cacheManager);
- return gcr.getComponent(componentType);
- }
+ /**
+ * Extracts a component of a given type from the cache's internal component registry
+ */
+ public static <T> T extractGlobalComponent(CacheManager cacheManager, Class<T> componentType) {
+ GlobalComponentRegistry gcr = extractGlobalComponentRegistry(cacheManager);
+ return gcr.getComponent(componentType);
+ }
- public static TransactionManager getTransactionManager(Cache cache) {
- return cache == null ? null : extractComponent(cache, TransactionManager.class);
- }
+ public static TransactionManager getTransactionManager(Cache cache) {
+ return cache == null ? null : extractComponent(cache, TransactionManager.class);
+ }
- /**
- * Replaces a component in a running cache
- *
- * @param cache cache in which to replace component
- * @param componentType component type of which to replace
- * @param replacementComponent new instance
- * @param rewire if true, ComponentRegistry.rewire() is called after replacing.
- * @return the original component that was replaced
- */
- public static <T> T replaceComponent(Cache<?, ?> cache, Class<T> componentType, T replacementComponent, boolean rewire) {
- ComponentRegistry cr = extractComponentRegistry(cache);
- T old = cr.getComponent(componentType);
- cr.registerComponent(replacementComponent, componentType);
- if (rewire) cr.rewire();
- return old;
- }
+ /**
+ * Replaces a component in a running cache
+ *
+ * @param cache cache in which to replace component
+ * @param componentType component type of which to replace
+ * @param replacementComponent new instance
+ * @param rewire if true, ComponentRegistry.rewire() is called after replacing.
+ * @return the original component that was replaced
+ */
+ public static <T> T replaceComponent(Cache<?, ?> cache, Class<T> componentType, T replacementComponent, boolean rewire) {
+ ComponentRegistry cr = extractComponentRegistry(cache);
+ T old = cr.getComponent(componentType);
+ cr.registerComponent(replacementComponent, componentType);
+ if (rewire) cr.rewire();
+ return old;
+ }
- /**
- * Replaces a component in a running cache manager (global component registry)
- *
- * @param cacheManager cache in which to replace component
- * @param componentType component type of which to replace
- * @param replacementComponent new instance
- * @param rewire if true, ComponentRegistry.rewire() is called after replacing.
- * @return the original component that was replaced
- */
- public static <T> T replaceComponent(CacheManager cacheManager, Class<T> componentType, T replacementComponent, boolean rewire) {
- GlobalComponentRegistry cr = extractGlobalComponentRegistry(cacheManager);
- T old = cr.getComponent(componentType);
- cr.registerComponent(replacementComponent, componentType);
- if (rewire) {
- cr.rewire();
- cr.rewireNamedRegistries();
- }
- return old;
- }
+ /**
+ * Replaces a component in a running cache manager (global component registry)
+ *
+ * @param cacheManager cache in which to replace component
+ * @param componentType component type of which to replace
+ * @param replacementComponent new instance
+ * @param rewire if true, ComponentRegistry.rewire() is called after replacing.
+ * @return the original component that was replaced
+ */
+ public static <T> T replaceComponent(CacheManager cacheManager, Class<T> componentType, T replacementComponent, boolean rewire) {
+ GlobalComponentRegistry cr = extractGlobalComponentRegistry(cacheManager);
+ T old = cr.getComponent(componentType);
+ cr.registerComponent(replacementComponent, componentType);
+ if (rewire) {
+ cr.rewire();
+ cr.rewireNamedRegistries();
+ }
+ return old;
+ }
- public static CacheLoader getCacheLoader(Cache cache) {
- CacheLoaderManager clm = extractComponent(cache, CacheLoaderManager.class);
- if (clm != null && clm.isEnabled()) {
- return clm.getCacheLoader();
- } else {
- return null;
- }
- }
+ public static CacheLoader getCacheLoader(Cache cache) {
+ CacheLoaderManager clm = extractComponent(cache, CacheLoaderManager.class);
+ if (clm != null && clm.isEnabled()) {
+ return clm.getCacheLoader();
+ } else {
+ return null;
+ }
+ }
}
Modified: trunk/core/src/test/java/org/infinispan/test/testng/SuiteResourcesAndLogTest.java
===================================================================
--- trunk/core/src/test/java/org/infinispan/test/testng/SuiteResourcesAndLogTest.java 2009-04-15 14:21:50 UTC (rev 125)
+++ trunk/core/src/test/java/org/infinispan/test/testng/SuiteResourcesAndLogTest.java 2009-04-15 14:34:45 UTC (rev 126)
@@ -20,18 +20,6 @@
private static Log log = LogFactory.getLog(SuiteResourcesAndLogTest.class);
- @BeforeSuite
- @AfterSuite
- public void removeTempDir() {
- TestingUtil.recursiveFileRemove(TestingUtil.TEST_FILES);
- log("Removing all the files from " + TestingUtil.TEST_FILES);
- File file = new File(TestingUtil.TEST_FILES);
- if (file.exists()) {
- System.err.println("!!!!!!!!!!!!! Directory '" + TestingUtil.TEST_FILES + "' should have been deleted!!!");
- } else {
- log("Successfully removed folder: '" + TestingUtil.TEST_FILES + "'");
- }
- }
@BeforeSuite
@AfterSuite
More information about the infinispan-commits
mailing list