[jboss-cvs] JBossCache/old/tests/perf/org/jboss/cache/aop ...
Ben Wang
bwang at jboss.com
Tue Oct 31 03:01:14 EST 2006
User: bwang
Date: 06/10/31 03:01:14
Added: old/tests/perf/org/jboss/cache/aop
LocalPerfAopTest.java Server.java
ReplicatedSyncMapContentionAopTest.java
LocalMapPerfAopTest.java
ReplicatedSyncPerfAopTest.java
ReplicatedSyncMapPerfAopTest.java
ReplicatedAsyncMapPerfAopTest.java
FieldUpdateEmulator.java StudentMetrics.java
Log:
Deprecated files moved to old dir.
Revision Changes Path
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/LocalPerfAopTest.java
Index: LocalPerfAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.lock.LockStrategyFactory;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.Properties;
/**
* Local mode performance test for PojoCache.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class LocalPerfAopTest extends TestCase
{
   PojoCache cache_;
   Configuration.CacheMode cachingMode_ = Configuration.CacheMode.LOCAL;
   // JNDI environment used to look up the dummy UserTransaction.
   final static Properties p_;
   // Previous JNDI initial-context factory, restored in tearDown().
   String oldFactory_ = null;
   final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
   // Fqn strings generated by nodeGen(); one cache node per entry.
   ArrayList nodeList_;
   // (depth 3, 4 children) generates 1 + 4 + 16 + 64 = 85 nodes.
   static final int depth_ = 3;
   static final int children_ = 4;
   DummyTransactionManager tm_;

   static
   {
      p_ = new Properties();
      p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
   }

   public LocalPerfAopTest(String name)
   {
      super(name);
   }

   /**
    * Installs the dummy JNDI context factory, starts a LOCAL-mode PojoCache
    * and generates the node-name list used by all timing runs.
    */
   public void setUp() throws Exception
   {
      super.setUp();
      oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
      System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
      DummyTransactionManager.getInstance();
      initCaches(Configuration.CacheMode.LOCAL);
      nodeList_ = nodeGen(depth_, children_);
      tm_ = new DummyTransactionManager();
      log("LocalPerfAopTest: cacheMode=LOCAL, one cache");
   }

   /**
    * Stops the cache and restores the JNDI initial-context factory that was
    * in effect before setUp(), if any.
    */
   public void tearDown() throws Exception
   {
      super.tearDown();
      DummyTransactionManager.destroy();
      destroyCaches();
      if (oldFactory_ != null)
      {
         System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
         oldFactory_ = null;
      }
   }

   /** Creates and starts the cache from META-INF/local-service.xml. */
   void initCaches(Configuration.CacheMode caching_mode) throws Exception
   {
      cachingMode_ = caching_mode;
      cache_ = new PojoCache();
      cache_.setConfiguration(new XmlConfigurationParser().parseFile("META-INF/local-service.xml"));
      cache_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
      cache_.start();
   }

   void destroyCaches() throws Exception
   {
      cache_.stop();
      cache_ = null;
   }

   /** Times add/get/remove over the whole node list without transactions. */
   public void testAll() throws Exception
   {
      log("=== No transaction ===");
      runAllOps(false);
   }

   protected void setLevelRW()
   {
      log("set lock level to RWUpgrade ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.REPEATABLE_READ);
   }

   protected void setLevelSerial()
   {
      log("set lock level to SimpleLock ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.SERIALIZABLE);
   }

   public void testAllTx_RWLock() throws Exception
   {
      setLevelRW();
      allTx();
   }

   public void testAllTx_SimpleLock() throws Exception
   {
      setLevelSerial();
      allTx();
   }

   /** Times add/get/remove with one transaction wrapped around each operation. */
   protected void allTx() throws Exception
   {
      log("=== With transaction ===");
      runAllOps(true);
   }

   /**
    * Shared driver for testAll()/allTx(): runs _add, _get and _remove over the
    * generated node list and logs the elapsed time and per-operation average
    * for each phase. Previously this body was duplicated in both callers.
    */
   private void runAllOps(boolean hasTx) throws Exception
   {
      DecimalFormat form = new DecimalFormat("#.00");

      // Step 1. Add entries to the cache
      long time1 = System.currentTimeMillis();
      int nOps = _add(hasTx);
      long time2 = System.currentTimeMillis();
      logTiming("_add", time2 - time1, nOps, form);

      // Step 2. Query the cache
      time1 = System.currentTimeMillis();
      nOps = _get(hasTx);
      time2 = System.currentTimeMillis();
      logTiming("_get", time2 - time1, nOps, form);

      // Step 3. Remove entries from the cache
      time1 = System.currentTimeMillis();
      nOps = _remove(hasTx);
      time2 = System.currentTimeMillis();
      logTiming("_remove", time2 - time1, nOps, form);
   }

   /** Logs elapsed wall-clock time and average milliseconds per operation. */
   private void logTiming(String op, long elapsed, int nOps, DecimalFormat form)
   {
      double d = (double) elapsed / nOps;
      // DecimalFormat.format(double, StringBuffer, FieldPosition) appends into
      // the buffer, so a fresh buffer is used for every call.
      StringBuffer dumbStr = new StringBuffer();
      FieldPosition fieldPos = new FieldPosition(0);
      log("Time elapsed for " + op + " is " + elapsed + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
   }

   /**
    * Puts one (key, value) pair into every node of nodeList_, optionally each
    * inside its own transaction. Returns the number of operations performed.
    */
   private int _add(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i);
         String value = Integer.toString(i);
         if (hasTx)
         {
            tx.begin();
            cache_.put((String) nodeList_.get(i), key, value);
            tx.commit();
         }
         else
         {
            cache_.put((String) nodeList_.get(i), key, value);
         }
      }
      return nodeList_.size();
   }

   /**
    * Reads back the key written by _add() from every node, optionally each
    * inside its own transaction. Returns the number of operations performed.
    */
   private int _get(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i);
         if (hasTx)
         {
            tx.begin();
            cache_.get((String) nodeList_.get(i), key);
            tx.commit();
         }
         else
         {
            cache_.get((String) nodeList_.get(i), key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Removes the key written by _add() from every node, optionally each
    * inside its own transaction. Returns the number of operations performed.
    */
   private int _remove(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i);
         if (hasTx)
         {
            tx.begin();
            cache_.remove((String) nodeList_.get(i), key);
            tx.commit();
         }
         else
         {
            cache_.remove((String) nodeList_.get(i), key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Generate the tree nodes quasi-exponentially. I.e., depth is the level
    * of the hierarchy and children is the number of children under each node.
    * This structure is used to add, get, and remove for each node.
    * Produces 1 + children + children^2 + ... + children^depth path strings,
    * starting from the root "/".
    */
   private ArrayList nodeGen(int depth, int children)
   {
      ArrayList strList = new ArrayList();
      ArrayList oldList = new ArrayList();
      ArrayList newList = new ArrayList();
      oldList.add("/");
      newList.add("/");
      strList.add("/");
      while (depth > 0)
      {
         // Produce the node names at this depth from the previous level.
         newList = new ArrayList();
         for (int i = 0; i < oldList.size(); i++)
         {
            for (int j = 0; j < children; j++)
            {
               String tmp = (String) oldList.get(i);
               tmp += Integer.toString(j);
               // Leaf level omits the trailing separator.
               if (depth != 1) tmp += "/";
               newList.add(tmp);
            }
         }
         strList.addAll(newList);
         oldList = newList;
         depth--;
      }
      log("Nodes generated: " + strList.size());
      return strList;
   }

   public static Test suite() throws Exception
   {
      return new TestSuite(LocalPerfAopTest.class);
   }

   private void log(String str)
   {
      System.out.println(str);
   }
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/Server.java
Index: Server.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import java.io.InputStreamReader;
/**
* A standalone dummy server that accepts data from other replicated cache.
*
* @deprecated 1.4 User should use the script under tests/script now.
*/
public class Server
{
   PojoCache cache_;

   /**
    * Creates and starts a replicated-sync PojoCache configured from
    * META-INF/replSync-service.xml with the dummy transaction manager.
    * (Method name "initCach" [sic] kept for compatibility with existing callers.)
    */
   void initCach() throws Exception
   {
      cache_ = new PojoCache();
      XmlConfigurationParser parser = new XmlConfigurationParser();
      Configuration c = parser.parseFile("META-INF/replSync-service.xml");
      cache_.setConfiguration(c);
      c.setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
      cache_.start();
   }

   /** Stops the cache and drops the reference. */
   void destroyCache() throws Exception
   {
      cache_.stop();
      cache_ = null;
   }

   PojoCache getCache()
   {
      return cache_;
   }

   /**
    * Starts the dummy server and parks the main thread so the cache can keep
    * receiving replication traffic. Previously the loop swallowed interrupts
    * and could never exit, making destroyCache() unreachable; it now restores
    * the interrupt status and shuts the cache down when interrupted.
    */
   public static void main(String[] args) throws Exception
   {
      Server server = new Server();
      server.initCach();
      while (true)
      {
         System.out.println("To abort hit cntrl-c");
         try
         {
            Thread.sleep(10000);
         }
         catch (InterruptedException ex)
         {
            // Preserve the interrupt status and fall through to shutdown.
            Thread.currentThread().interrupt();
            break;
         }
      }
      server.destroyCache();
   }
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/ReplicatedSyncMapContentionAopTest.java
Index: ReplicatedSyncMapContentionAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.lock.LockStrategyFactory;
import org.jboss.cache.lock.TimeoutException;
import org.jboss.cache.lock.UpgradeException;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Replicated mode performance test for PojoCache to test out contention.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class ReplicatedSyncMapContentionAopTest extends TestCase
{
// Two caches replicating to each other; each Loader thread writes through one.
PojoCache cache1_, cache2_;
// NOTE(review): class name says "Sync" but the mode field is REPL_ASYNC; the
// configuration actually comes from replSync-service.xml — confirm which wins.
Configuration.CacheMode cachingMode_ = Configuration.CacheMode.REPL_ASYNC;
// JNDI environment used to look up the dummy UserTransaction.
final static Properties p_;
// final static Log log_=LogFactory.getLog(LocalPerfAopTest.class);
// Previous JNDI initial-context factory, restored in tearDown().
String oldFactory_ = null;
final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
DummyTransactionManager tm_;
// Cache-backed Map proxies obtained from cache1_ via getObject(), one per node.
Map[] proxyMaps_ = null;
// Fqn strings generated by nodeGen(); (depth 1, 1 child) yields just 2 nodes.
ArrayList nodeList_;
static final int depth_ = 1;
static final int children_ = 1;
// Minimum length of the generated map values, in characters.
static final int mapValueSize_ = 5;
static final String seed1_ = "This is a test. ";
static final String seed2_ = "THAT is a TEST. ";
StringBuffer originalStrBuf_;
StringBuffer newStrBuf_;
// First exception seen by any Loader thread; checked in the run() loop so all
// loaders stop after a failure. NOTE(review): written without synchronization.
static Throwable ex_ = null;
static
{
p_ = new Properties();
p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
}
public ReplicatedSyncMapContentionAopTest(String name)
{
super(name);
}
// Starts two replicated caches, waits for group formation, builds the seed
// strings and node list, and attaches one Map per node to cache1_.
public void setUp() throws Exception
{
super.setUp();
oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
DummyTransactionManager.getInstance();
cache1_ = initCaches();
cache2_ = initCaches();
tm_ = new DummyTransactionManager();
// Give the two caches time to discover each other and form the group.
Thread.sleep(5000);
originalStrBuf_ = new StringBuffer();
newStrBuf_ = new StringBuffer();
generateString();
log("ReplicatedSyncPerfAopTest: cacheMode=ReplSync");
nodeList_ = nodeGen(depth_, children_);
populateNode();
}
// Fills originalStrBuf_/newStrBuf_ by repeating the seeds until at least
// mapValueSize_ characters long. NOTE(review): locals length and isTrue are unused.
private void generateString()
{
int length = seed1_.length();
boolean isTrue = false;
while (originalStrBuf_.length() < mapValueSize_)
{
originalStrBuf_.append(seed1_);
newStrBuf_.append(seed2_);
}
}
// Attaches a populated HashMap to every node of cache1_ and keeps the
// returned cache-backed proxy in proxyMaps_, logging the average time.
private void populateNode() throws Exception
{
// Formating
DecimalFormat form = new DecimalFormat("#.00");
FieldPosition fieldPos = new FieldPosition(0);
StringBuffer dumbStr = new StringBuffer();
proxyMaps_ = new Map[nodeList_.size()];
long time1 = System.currentTimeMillis();
int nOps = 0;
for (int i = 0; i < nodeList_.size(); i++)
{
// String key = Integer.toString(i);
// put the cache in the aop first
Map map = populateMap();
cache1_.putObject((String) nodeList_.get(i), map);
proxyMaps_[i] = (Map) cache1_.getObject((String) nodeList_.get(i));
}
nOps = nodeList_.size();
long time2 = System.currentTimeMillis();
double d = (double) (time2 - time1) / nOps;
log("Time elapsed for one putObject and getObject entry is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
}
// Builds a plain HashMap with one entry per node, keyed "<i>aop", all values
// being the generated seed string.
private Map populateMap()
{
Map map = new HashMap();
for (int i = 0; i < nodeList_.size(); i++)
{
String key = Integer.toString(i) + "aop";
String value = originalStrBuf_.toString();
map.put(key, value);
}
return map;
}
public void tearDown() throws Exception
{
super.tearDown();
DummyTransactionManager.destroy();
destroyCaches();
if (oldFactory_ != null)
{
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
oldFactory_ = null;
}
proxyMaps_ = null;
}
// Creates and starts one cache from META-INF/replSync-service.xml.
PojoCache initCaches() throws Exception
{
PojoCache cache_;
cache_ = new PojoCache();
cache_.setConfiguration(new XmlConfigurationParser().parseFile("META-INF/replSync-service.xml"));
cache_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
// cache_.setCacheMode(cachingMode_);
cache_.start();
return cache_;
// org.jgroups.log.Trace.init();
}
// NOTE(review): intentionally empty — the caches started in setUp() are never
// stopped here; confirm whether this leak across tests is acceptable.
void destroyCaches() throws Exception
{
}
public void testDummy()
{
}
// NOTE(review): the Loader threads are constructed but never started (the
// start()/join() calls below are commented out), so this test currently
// exercises nothing beyond setUp()/tearDown().
public void testAll() throws Exception
{
log("=== Start ===");
// try { Thread.sleep(5000); } catch (Exception ex) {};
boolean hasTx = true;
// Step 1. Add entries to the cache
Loader la = new Loader(nodeList_, proxyMaps_, hasTx, cache1_);
Loader lb = new Loader(nodeList_, proxyMaps_, hasTx, cache2_);
// la.start();
// lb.start();
// la.join();
// lb.join();
log("=== End ===");
}
protected void setLevelRW()
{
log("set lock level to RWUpgrade ...");
LockStrategyFactory.setIsolationLevel(IsolationLevel.REPEATABLE_READ);
}
// NOTE(review): "X" prefix disables this variant; kept as in the original.
protected void XsetLevelSerial()
{
log("set lock level to SimpleLock ...");
LockStrategyFactory.setIsolationLevel(IsolationLevel.SERIALIZABLE);
}
/**
* Generate the tree nodes quasi-exponentially. I.e., depth is the level
* of the hierarchy and children is the number of children under each node.
* This strucutre is used to add, get, and remove for each node.
*/
private ArrayList nodeGen(int depth, int children)
{
ArrayList strList = new ArrayList();
ArrayList oldList = new ArrayList();
ArrayList newList = new ArrayList();
oldList.add("/");
newList.add("/");
strList.add("/");
while (depth > 0)
{
// Trying to produce node name at this depth.
newList = new ArrayList();
for (int i = 0; i < oldList.size(); i++)
{
for (int j = 0; j < children; j++)
{
String tmp = (String) oldList.get(i);
tmp += Integer.toString(j);
// Leaf level omits the trailing separator.
if (depth != 1) tmp += "/";
newList.add(tmp);
}
}
strList.addAll(newList);
oldList = newList;
depth--;
}
log("Nodes generated: " + strList.size());
return strList;
}
public static Test suite() throws Exception
{
return new TestSuite(ReplicatedSyncMapContentionAopTest.class);
}
private void log(String str)
{
// System.out.println(this.getClass().getName() +": " +str);
System.out.println(str);
}
// Worker thread that repeatedly writes into the shared map proxies through
// one of the two caches, to create replication contention.
public static class Loader extends Thread
{
List nodeList_;
Map[] proxyMaps_;
boolean hasTx;
PojoCache cache_;
public Loader(List nodeList, Map[] proxyMaps, boolean hasTx, PojoCache cache)
{
nodeList_ = nodeList;
proxyMaps_ = proxyMaps;
this.hasTx = hasTx;
cache_ = cache;
}
// Loops until another loader records an exception in ex_ or the counter
// budget is exhausted, sleeping 10s between puts. Lock-timeout/upgrade
// failures are expected under contention and are logged and retried.
// NOTE(review): counter is incremented twice per iteration (once in the
// while condition, once in the println), so only ~100 puts happen and the
// printed values skip — likely unintended. The local 'time' is unused.
public void run()
{
long counter = 0;
while (ex_ == null && counter++ < 200)
{
long time = System.currentTimeMillis();
try
{
Thread.sleep(10000);
}
catch (InterruptedException e)
{
}
try
{
_put();
// _get();
}
catch (Exception e)
{
if (e.getCause() instanceof UpgradeException ||
e.getCause() instanceof TimeoutException)
{
System.err.println("TimeoutException: " + e.getCause());
continue;
}
ex_ = e;
throw new IllegalStateException("Exception in put(): " + e);
}
System.out.println("Loop counter: " + counter++);
}
}
// Overwrites every map entry through the cache-backed proxy, one
// transaction per put when hasTx is set.
private void _put() throws Exception
{
UserTransaction tx = null;
if (hasTx)
{
tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
}
String value = "Test";
for (int i = 0; i < nodeList_.size(); i++)
{
String key = Integer.toString(i) + "aop";
if (hasTx)
{
tx.begin();
proxyMaps_[i].put(key, value);
tx.commit();
}
else
{
proxyMaps_[i].put(key, value);
}
}
}
// Reads every map entry through the proxy. NOTE(review): the tx looked up
// when hasTx is set is never begun/committed here, and 'value' is unused.
private void _get() throws Exception
{
UserTransaction tx = null;
if (hasTx)
{
tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
}
String value = "Test";
for (int i = 0; i < nodeList_.size(); i++)
{
String key = Integer.toString(i) + "aop";
proxyMaps_[i].get(key);
}
}
}
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/LocalMapPerfAopTest.java
Index: LocalMapPerfAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.lock.LockStrategyFactory;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* Local mode performance test for TreeCache.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class LocalMapPerfAopTest extends TestCase
{
   PojoCache cache_;
   Configuration.CacheMode cachingMode_ = Configuration.CacheMode.LOCAL;
   // JNDI environment used to look up the dummy UserTransaction.
   final static Properties p_;
   // Previous JNDI initial-context factory, restored in tearDown().
   String oldFactory_ = null;
   final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
   DummyTransactionManager tm_;
   // Cache-backed Map proxies obtained via getObject(), one per node.
   Map[] proxyMaps_ = null;
   // Fqn strings generated by nodeGen(); one attached Map per entry.
   ArrayList nodeList_;
   // (depth 3, 4 children) generates 1 + 4 + 16 + 64 = 85 nodes.
   static final int depth_ = 3;
   static final int children_ = 4;

   static
   {
      p_ = new Properties();
      p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
   }

   public LocalMapPerfAopTest(String name)
   {
      super(name);
   }

   /**
    * Installs the dummy JNDI context factory, starts a LOCAL-mode PojoCache,
    * generates the node list and attaches one Map per node.
    */
   public void setUp() throws Exception
   {
      super.setUp();
      oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
      System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
      DummyTransactionManager.getInstance();
      initCaches(Configuration.CacheMode.LOCAL);
      tm_ = new DummyTransactionManager();
      // Fixed: previously logged the wrong class name ("ReplicatedAsyncPerfAopTest").
      log("LocalMapPerfAopTest: cacheMode=Local");
      nodeList_ = nodeGen(depth_, children_);
      populateNode();
   }

   /**
    * Attaches a populated HashMap to every node and keeps the cache-backed
    * proxy returned by getObject() in proxyMaps_.
    */
   private void populateNode() throws Exception
   {
      proxyMaps_ = new Map[nodeList_.size()];
      for (int i = 0; i < nodeList_.size(); i++)
      {
         // Attach the plain map first; the proxy read back is cache-backed.
         Map map = populateMap();
         cache_.putObject((String) nodeList_.get(i), map);
         proxyMaps_[i] = (Map) cache_.getObject((String) nodeList_.get(i));
      }
   }

   /** Builds a plain HashMap with one entry per node, keyed "&lt;i&gt;aop". */
   private Map populateMap()
   {
      Map map = new HashMap();
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         String value = "This is a test for performance of get and set of regular treecache and aop";
         map.put(key, value);
      }
      return map;
   }

   /** Stops the cache and restores the previous JNDI initial-context factory. */
   public void tearDown() throws Exception
   {
      super.tearDown();
      DummyTransactionManager.destroy();
      destroyCaches();
      if (oldFactory_ != null)
      {
         System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
         oldFactory_ = null;
      }
      proxyMaps_ = null;
   }

   /** Creates and starts the cache from META-INF/local-service.xml. */
   void initCaches(Configuration.CacheMode caching_mode) throws Exception
   {
      cachingMode_ = caching_mode;
      cache_ = new PojoCache();
      cache_.setConfiguration(new XmlConfigurationParser().parseFile("META-INF/local-service.xml"));
      cache_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
      cache_.start();
   }

   void destroyCaches() throws Exception
   {
      cache_.stop();
      cache_ = null;
   }

   /**
    * Times put/get/remove through the attached map proxies without
    * transactions and logs the per-operation averages.
    */
   public void testAll() throws Exception
   {
      // Fixed: this run uses hasTx=false, so the banner now matches
      // (previously it logged "=== Transaction ===").
      log("=== No transaction ===");
      DecimalFormat form = new DecimalFormat("#.00");
      boolean hasTx = false;

      // Step 1. Update entries in the attached maps
      long time1 = System.currentTimeMillis();
      int nOps = _put(hasTx);
      long time2 = System.currentTimeMillis();
      logTiming("_put", time2 - time1, nOps, form);

      // Step 2. Query the attached maps
      time1 = System.currentTimeMillis();
      nOps = _get(hasTx);
      time2 = System.currentTimeMillis();
      logTiming("_get", time2 - time1, nOps, form);

      // Step 3. Remove entries from the attached maps
      time1 = System.currentTimeMillis();
      nOps = _remove(hasTx);
      time2 = System.currentTimeMillis();
      logTiming("_remove", time2 - time1, nOps, form);
   }

   /** Logs elapsed wall-clock time and average milliseconds per operation. */
   private void logTiming(String op, long elapsed, int nOps, DecimalFormat form)
   {
      double d = (double) elapsed / nOps;
      // DecimalFormat.format(double, StringBuffer, FieldPosition) appends into
      // the buffer, so a fresh buffer is used for every call.
      StringBuffer dumbStr = new StringBuffer();
      FieldPosition fieldPos = new FieldPosition(0);
      log("Time elapsed for " + op + " is " + elapsed + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
   }

   protected void setLevelRW()
   {
      log("set lock level to RWUpgrade ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.REPEATABLE_READ);
   }

   protected void setLevelSerial()
   {
      log("set lock level to SimpleLock ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.SERIALIZABLE);
   }

   /**
    * Overwrites one entry in every node's attached map, re-fetching the proxy
    * from the cache each time. Returns the number of operations performed.
    */
   private int _put(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      String value = "This is a modified value for the unit testing";
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         // Deliberately measures getObject() + put() rather than using the
         // proxies cached in proxyMaps_.
         Map map = (Map) cache_.getObject((String) nodeList_.get(i));
         if (hasTx)
         {
            tx.begin();
            map.put(key, value);
            tx.commit();
         }
         else
         {
            map.put(key, value);
         }
      }
      return nodeList_.size();
   }

   /**
    * Reads one entry from every node's attached map proxy. Returns the number
    * of operations performed. Fixed: the loop previously started at i = 1
    * while still reporting nodeList_.size() operations, skewing the average;
    * it now covers every node, consistent with _put().
    */
   private int _get(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].get(key);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].get(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Removes one entry from every node's attached map proxy. Returns the
    * number of operations performed. Fixed: the loop previously started at
    * i = 1 while still reporting nodeList_.size() operations; it now covers
    * every node, consistent with _put().
    */
   private int _remove(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].remove(key);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].remove(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Generate the tree nodes quasi-exponentially. I.e., depth is the level
    * of the hierarchy and children is the number of children under each node.
    * This structure is used to add, get, and remove for each node.
    * Produces 1 + children + children^2 + ... + children^depth path strings,
    * starting from the root "/".
    */
   private ArrayList nodeGen(int depth, int children)
   {
      ArrayList strList = new ArrayList();
      ArrayList oldList = new ArrayList();
      ArrayList newList = new ArrayList();
      oldList.add("/");
      newList.add("/");
      strList.add("/");
      while (depth > 0)
      {
         // Produce the node names at this depth from the previous level.
         newList = new ArrayList();
         for (int i = 0; i < oldList.size(); i++)
         {
            for (int j = 0; j < children; j++)
            {
               String tmp = (String) oldList.get(i);
               tmp += Integer.toString(j);
               // Leaf level omits the trailing separator.
               if (depth != 1) tmp += "/";
               newList.add(tmp);
            }
         }
         strList.addAll(newList);
         oldList = newList;
         depth--;
      }
      log("Nodes generated: " + strList.size());
      return strList;
   }

   public static Test suite() throws Exception
   {
      return new TestSuite(LocalMapPerfAopTest.class);
   }

   private void log(String str)
   {
      System.out.println(str);
   }
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/ReplicatedSyncPerfAopTest.java
Index: ReplicatedSyncPerfAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.TreeCache;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.Properties;
/**
* Replicated synchronous mode performance test for transactional TreeCache.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class ReplicatedSyncPerfAopTest extends TestCase
{
PojoCache cache1_, cache2_, cache3_;
Configuration.CacheMode cachingMode_ = Configuration.CacheMode.REPL_SYNC;
final String groupName_ = "TreeCacheTestGroup";
final static Properties p_;
// final static Log log_=LogFactory.getLog(ReplicatedSyncPerfAopTest.class);
String oldFactory_ = null;
final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
ArrayList nodeList_;
// (4, 4) combination will generate 340 nodes.
static final int depth_ = 3;
static final int children_ = 3;
DummyTransactionManager tm_;
static
{
p_ = new Properties();
p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
}
public ReplicatedSyncPerfAopTest(String name)
{
super(name);
}
public void setUp() throws Exception
{
super.setUp();
oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
DummyTransactionManager.getInstance();
nodeList_ = nodeGen(depth_, children_);
tm_ = new DummyTransactionManager();
log("ReplicatedSyncPerfAopTest: cacheMode=REPL_SYNC");
}
public void tearDown() throws Exception
{
super.tearDown();
DummyTransactionManager.destroy();
if (oldFactory_ != null)
{
System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
oldFactory_ = null;
}
}
PojoCache createCache() throws Exception
{
PojoCache cache = new PojoCache();
cache.setConfiguration(new XmlConfigurationParser().parseFile("META-INF/replSync-service.xml"));
cache.getConfiguration().setIsolationLevel(IsolationLevel.REPEATABLE_READ);
cache.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
return cache;
}
void destroyCache(TreeCache cache) throws Exception
{
cache.stop();
cache = null;
}
public void testOneCacheTx() throws Exception
{
log("=== 1 cache with transaction (no concurrent access) ===");
cache1_ = createCache();
cache1_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
cache1_.start();
// Formating
DecimalFormat form = new DecimalFormat("#.00");
FieldPosition fieldPos = new FieldPosition(0);
StringBuffer dumbStr = new StringBuffer();
boolean hasTx = true;
boolean oneTxOnly = false;
// Step 1. Add entries to the cache
long time1 = System.currentTimeMillis();
int nOps = _add(cache1_, hasTx, oneTxOnly);
long time2 = System.currentTimeMillis();
double d = (double) (time2 - time1) / nOps;
log("Time elapsed for _add is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
dumbStr = new StringBuffer();
// Step 2. Query the cache
time1 = System.currentTimeMillis();
nOps = _get(cache1_, hasTx, oneTxOnly);
time2 = System.currentTimeMillis();
d = (double) (time2 - time1) / nOps;
log("Time elapsed for _get is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
dumbStr = new StringBuffer();
// Step 3. Remove entries from the cache
time1 = System.currentTimeMillis();
nOps = _remove(cache1_, hasTx, oneTxOnly);
time2 = System.currentTimeMillis();
d = (double) (time2 - time1) / nOps;
log("Time elapsed for _remove is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
destroyCache(cache1_);
}
public void test2CachesTx() throws Exception
{
log("=== 2 caches with transaction (no concurrent access) ===");
cache1_ = createCache();
cache2_ = createCache();
cache1_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
cache2_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
cache1_.start();
cache2_.start();
// Formating
DecimalFormat form = new DecimalFormat("#.00");
FieldPosition fieldPos = new FieldPosition(0);
StringBuffer dumbStr = new StringBuffer();
boolean hasTx = true;
boolean oneTxOnly = false;
// Step 1. Add entries to the cache
long time1 = System.currentTimeMillis();
int nOps = _add(cache1_, hasTx, oneTxOnly);
long time2 = System.currentTimeMillis();
double d = (double) (time2 - time1) / nOps;
log("Time elapsed for _add is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
dumbStr = new StringBuffer();
// Step 2. Query the cache
time1 = System.currentTimeMillis();
nOps = _get(cache1_, hasTx, oneTxOnly);
time2 = System.currentTimeMillis();
d = (double) (time2 - time1) / nOps;
log("Time elapsed for _get is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
dumbStr = new StringBuffer();
// Step 3. Remove entries from the cache
time1 = System.currentTimeMillis();
nOps = _remove(cache2_, hasTx, oneTxOnly); // Note we remove nodes from cache2.
time2 = System.currentTimeMillis();
d = (double) (time2 - time1) / nOps;
log("Time elapsed for _remove is " + (time2 - time1) + " with " + nOps
+ " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
" msec.");
destroyCache(cache1_);
destroyCache(cache2_);
}
/**
 * Same two-cache scenario as test2CachesTx, but each of the add/get/remove
 * passes runs under a single enclosing transaction instead of one
 * transaction per operation.
 */
public void test2CachesOneTxOnly() throws Exception
{
   log("=== 2 caches with single transaction only (no concurrent access) ===");
   cache1_ = createCache();
   cache2_ = createCache();
   cache1_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
   cache2_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
   cache1_.start();
   cache2_.start();
   // Output formatting for the per-op average.
   DecimalFormat form = new DecimalFormat("#.00");
   FieldPosition fieldPos = new FieldPosition(0);
   boolean hasTx = true;
   boolean oneTxOnly = true;
   // Phase 1: populate through cache1.
   long begin = System.currentTimeMillis();
   int count = _add(cache1_, hasTx, oneTxOnly);
   long elapsed = System.currentTimeMillis() - begin;
   double avg = (double) elapsed / count;
   log("Time elapsed for _add is " + elapsed + " with " + count
         + " operations. Average per ops is: " + form.format(avg, new StringBuffer(), fieldPos) +
         " msec.");
   // Phase 2: read back through cache1.
   begin = System.currentTimeMillis();
   count = _get(cache1_, hasTx, oneTxOnly);
   elapsed = System.currentTimeMillis() - begin;
   avg = (double) elapsed / count;
   log("Time elapsed for _get is " + elapsed + " with " + count
         + " operations. Average per ops is: " + form.format(avg, new StringBuffer(), fieldPos) +
         " msec.");
   // Phase 3: remove through cache2, relying on replicated state.
   begin = System.currentTimeMillis();
   count = _remove(cache2_, hasTx, oneTxOnly);
   elapsed = System.currentTimeMillis() - begin;
   avg = (double) elapsed / count;
   log("Time elapsed for _remove is " + elapsed + " with " + count
         + " operations. Average per ops is: " + form.format(avg, new StringBuffer(), fieldPos) +
         " msec.");
   destroyCache(cache1_);
   destroyCache(cache2_);
}
/**
 * Measures add/get/remove across three replicated caches, one transaction
 * per operation: entries are added via cache1, read via cache2 and removed
 * via cache3, so every phase after the first exercises replicated state.
 */
public void test3CachesTx() throws Exception
{
   log("=== 3 caches with transaction (no concurrent access) ===");
   cache1_ = createCache();
   cache2_ = createCache();
   cache3_ = createCache();
   // Fix: cache2_ was previously configured twice (copy/paste duplicate);
   // each cache is now configured exactly once.
   cache1_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
   cache2_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
   cache3_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.JBossTransactionManagerLookup");
   cache1_.start();
   cache2_.start();
   cache3_.start();
   // Output formatting for the per-op average.
   DecimalFormat form = new DecimalFormat("#.00");
   FieldPosition fieldPos = new FieldPosition(0);
   boolean hasTx = true;
   boolean oneTxOnly = false;
   // Step 1. Add entries through cache1.
   long time1 = System.currentTimeMillis();
   int nOps = _add(cache1_, hasTx, oneTxOnly);
   long time2 = System.currentTimeMillis();
   double d = (double) (time2 - time1) / nOps;
   log("Time elapsed for _add is " + (time2 - time1) + " with " + nOps
         + " operations. Average per ops is: " + form.format(d, new StringBuffer(), fieldPos) +
         " msec.");
   // Step 2. Query the replicated entries through cache2.
   time1 = System.currentTimeMillis();
   nOps = _get(cache2_, hasTx, oneTxOnly);
   time2 = System.currentTimeMillis();
   d = (double) (time2 - time1) / nOps;
   log("Time elapsed for _get is " + (time2 - time1) + " with " + nOps
         + " operations. Average per ops is: " + form.format(d, new StringBuffer(), fieldPos) +
         " msec.");
   // Step 3. Remove the entries through cache3.
   time1 = System.currentTimeMillis();
   nOps = _remove(cache3_, hasTx, oneTxOnly);
   time2 = System.currentTimeMillis();
   d = (double) (time2 - time1) / nOps;
   log("Time elapsed for _remove is " + (time2 - time1) + " with " + nOps
         + " operations. Average per ops is: " + form.format(d, new StringBuffer(), fieldPos) +
         " msec.");
   destroyCache(cache1_);
   destroyCache(cache2_);
   destroyCache(cache3_);
}
/**
 * Puts one (key, value) pair per generated fqn into the given cache, where
 * key and value are both the node's index as a string. Depending on the
 * flags the puts run untransacted, one transaction per put, or all under a
 * single transaction.
 *
 * @return the number of operations performed (the node count).
 */
private int _add(TreeCache cache, boolean hasTx, boolean oneTxOnly) throws Exception
{
   UserTransaction tx = null;
   if (hasTx)
   {
      tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
   }
   boolean txPerOp = hasTx && !oneTxOnly;
   boolean singleTx = hasTx && oneTxOnly;
   if (singleTx)
   {
      tx.begin();
   }
   int size = nodeList_.size();
   for (int idx = 0; idx < size; idx++)
   {
      String kv = Integer.toString(idx);
      String fqn = (String) nodeList_.get(idx);
      if (txPerOp)
      {
         tx.begin();
         cache.put(fqn, kv, kv);
         tx.commit();
      }
      else
      {
         cache.put(fqn, kv, kv);
      }
   }
   if (singleTx)
   {
      tx.commit();
   }
   return size;
}
/**
 * Reads back one key per generated fqn from the given cache, mirroring the
 * keys written by _add. Transaction handling follows the same three modes
 * as _add (none, per-op, or one enclosing transaction).
 *
 * @return the number of operations performed (the node count).
 */
private int _get(TreeCache cache, boolean hasTx, boolean oneTxOnly) throws Exception
{
   UserTransaction tx = null;
   if (hasTx)
   {
      tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
   }
   boolean txPerOp = hasTx && !oneTxOnly;
   boolean singleTx = hasTx && oneTxOnly;
   if (singleTx)
   {
      tx.begin();
   }
   int size = nodeList_.size();
   for (int idx = 0; idx < size; idx++)
   {
      String key = Integer.toString(idx);
      String fqn = (String) nodeList_.get(idx);
      if (txPerOp)
      {
         tx.begin();
         cache.get(fqn, key);
         tx.commit();
      }
      else
      {
         cache.get(fqn, key);
      }
   }
   if (singleTx)
   {
      tx.commit();
   }
   return size;
}
/**
 * Removes one key per generated fqn from the given cache, mirroring the
 * keys written by _add. Transaction handling follows the same three modes
 * as _add (none, per-op, or one enclosing transaction).
 *
 * @return the number of operations performed (the node count).
 */
private int _remove(TreeCache cache, boolean hasTx, boolean oneTxOnly) throws Exception
{
   UserTransaction tx = null;
   if (hasTx)
   {
      tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
   }
   boolean txPerOp = hasTx && !oneTxOnly;
   boolean singleTx = hasTx && oneTxOnly;
   if (singleTx)
   {
      tx.begin();
   }
   int size = nodeList_.size();
   for (int idx = 0; idx < size; idx++)
   {
      String key = Integer.toString(idx);
      String fqn = (String) nodeList_.get(idx);
      if (txPerOp)
      {
         tx.begin();
         cache.remove(fqn, key);
         tx.commit();
      }
      else
      {
         cache.remove(fqn, key);
      }
   }
   if (singleTx)
   {
      tx.commit();
   }
   return size;
}
/**
 * Generates tree node fqn strings quasi-exponentially: {@code depth} is the
 * number of levels below the root and {@code children} is the fan-out at
 * each node. The returned list contains the root "/" plus every generated
 * path; non-leaf paths keep a trailing "/" so children can be appended.
 */
private ArrayList nodeGen(int depth, int children)
{
   ArrayList all = new ArrayList();
   ArrayList frontier = new ArrayList();
   all.add("/");
   frontier.add("/");
   for (int level = depth; level > 0; level--)
   {
      // Expand every path on the current frontier by one more level.
      ArrayList next = new ArrayList();
      for (int i = 0; i < frontier.size(); i++)
      {
         String base = (String) frontier.get(i);
         for (int j = 0; j < children; j++)
         {
            String path = base + Integer.toString(j);
            if (level != 1)
            {
               path += "/"; // not a leaf yet; children will be appended
            }
            next.add(path);
         }
      }
      all.addAll(next);
      frontier = next;
   }
   log("Nodes generated: " + all.size());
   return all;
}
/**
 * Builds the JUnit suite containing every test method of this class.
 */
public static Test suite() throws Exception
{
   TestSuite suite = new TestSuite(ReplicatedSyncPerfAopTest.class);
   return suite;
}
private void log(String str)
{
// System.out.println(this.getClass().getName() +": " +str);
System.out.println(str);
}
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/ReplicatedSyncMapPerfAopTest.java
Index: ReplicatedSyncMapPerfAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.lock.LockStrategyFactory;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* Replicated synchronous (REPL_SYNC) performance test for PojoCache-managed Maps.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class ReplicatedSyncMapPerfAopTest extends TestCase
{
   PojoCache cache_;
   Configuration.CacheMode cachingMode_ = Configuration.CacheMode.LOCAL;
   final static Properties p_;
   String oldFactory_ = null;
   final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
   DummyTransactionManager tm_;
   // One aspectized Map proxy per generated node fqn, filled in populateNode().
   Map[] proxyMaps_ = null;
   // Fqn strings produced by nodeGen().
   ArrayList nodeList_;
   static final int depth_ = 3;
   static final int children_ = 4;
   // Minimum character length of each map value string.
   static final int mapValueSize_ = 100;
   static final String seed1_ = "This is a test. ";
   static final String seed2_ = "THAT is a TEST. ";
   StringBuffer originalStrBuf_; // values written during population
   StringBuffer newStrBuf_;      // values written during the timed _put pass

   static
   {
      p_ = new Properties();
      p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
   }

   public ReplicatedSyncMapPerfAopTest(String name)
   {
      super(name);
   }

   /**
    * Installs the dummy JNDI/transaction environment, starts the cache and
    * populates one Map pojo per generated node before the timed test runs.
    */
   public void setUp() throws Exception
   {
      super.setUp();
      oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
      System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
      DummyTransactionManager.getInstance();
      // NOTE(review): the CacheMode argument is only recorded in cachingMode_;
      // the effective mode comes from META-INF/replSync-service.xml in initCaches().
      initCaches(Configuration.CacheMode.LOCAL);
      tm_ = new DummyTransactionManager();
      originalStrBuf_ = new StringBuffer();
      newStrBuf_ = new StringBuffer();
      generateString();
      // Fix: message previously said "ReplicatedSyncPerfAopTest", mislabeling
      // this test's output.
      log("ReplicatedSyncMapPerfAopTest: cacheMode=ReplSync");
      nodeList_ = nodeGen(depth_, children_);
      populateNode();
   }

   /**
    * Builds the two value strings: originalStrBuf_ from seed1_ and
    * newStrBuf_ from seed2_, each at least mapValueSize_ characters long.
    */
   private void generateString()
   {
      while (originalStrBuf_.length() < mapValueSize_)
      {
         originalStrBuf_.append(seed1_);
         newStrBuf_.append(seed2_);
      }
   }

   /**
    * Attaches a freshly populated HashMap to every generated fqn via
    * putObject and keeps the returned aspectized proxy in proxyMaps_,
    * logging the average put/get time per node.
    */
   private void populateNode() throws Exception
   {
      // Output formatting for the per-op average.
      DecimalFormat form = new DecimalFormat("#.00");
      FieldPosition fieldPos = new FieldPosition(0);
      StringBuffer dumbStr = new StringBuffer();
      proxyMaps_ = new Map[nodeList_.size()];
      long time1 = System.currentTimeMillis();
      int nOps = 0;
      for (int i = 0; i < nodeList_.size(); i++)
      {
         // Put the plain map into the cache first, then fetch the managed proxy.
         Map map = populateMap();
         cache_.putObject((String) nodeList_.get(i), map);
         proxyMaps_[i] = (Map) cache_.getObject((String) nodeList_.get(i));
      }
      nOps = nodeList_.size();
      long time2 = System.currentTimeMillis();
      double d = (double) (time2 - time1) / nOps;
      log("Time elapsed for one putObject and getObject entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
   }

   /**
    * Builds a HashMap with one "<i>aop" key per generated node, all mapped
    * to the original seed string.
    */
   private Map populateMap()
   {
      Map map = new HashMap();
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         String value = originalStrBuf_.toString();
         map.put(key, value);
      }
      return map;
   }

   public void tearDown() throws Exception
   {
      super.tearDown();
      DummyTransactionManager.destroy();
      destroyCaches();
      if (oldFactory_ != null)
      {
         // Restore whatever JNDI factory was configured before setUp().
         System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
         oldFactory_ = null;
      }
      proxyMaps_ = null;
   }

   /**
    * Creates and starts the cache from the REPL_SYNC configuration file.
    */
   void initCaches(Configuration.CacheMode caching_mode) throws Exception
   {
      cachingMode_ = caching_mode;
      cache_ = new PojoCache();
      cache_.setConfiguration(new XmlConfigurationParser().parseFile("META-INF/replSync-service.xml"));
      cache_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
      cache_.start();
   }

   void destroyCaches() throws Exception
   {
      cache_.stop();
      cache_ = null;
   }

   /**
    * Runs the timed put/get/remove passes against the aspectized map
    * proxies (untransacted) and logs the average latency of each pass.
    */
   public void testAll() throws Exception
   {
      log("=== Start ===");
      // Output formatting for the per-op average.
      DecimalFormat form = new DecimalFormat("#.00");
      FieldPosition fieldPos = new FieldPosition(0);
      StringBuffer dumbStr = new StringBuffer();
      boolean hasTx = false;
      // Step 1. Overwrite one entry per proxy map.
      long time1 = System.currentTimeMillis();
      int nOps = 0;
      nOps = _put(hasTx);
      long time2 = System.currentTimeMillis();
      double d = (double) (time2 - time1) / nOps;
      log("Time elapsed for put entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      dumbStr = new StringBuffer();
      // Step 2. Read one entry per proxy map.
      time1 = System.currentTimeMillis();
      nOps = _get(hasTx);
      time2 = System.currentTimeMillis();
      d = (double) (time2 - time1) / nOps;
      log("Time elapsed for get entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      dumbStr = new StringBuffer();
      // Step 3. Remove one entry per proxy map.
      time1 = System.currentTimeMillis();
      nOps = _remove(hasTx);
      time2 = System.currentTimeMillis();
      d = (double) (time2 - time1) / nOps;
      log("Time elapsed for remove entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      log("=== End ===");
   }

   protected void setLevelRW()
   {
      log("set lock level to RWUpgrade ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.REPEATABLE_READ);
   }

   protected void setLevelSerial()
   {
      log("set lock level to SimpleLock ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.SERIALIZABLE);
   }

   /**
    * Writes the "new" seed value into one key of every proxy map,
    * optionally wrapping each write in its own transaction.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _put(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      String value = newStrBuf_.toString();
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].put(key, value);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].put(key, value);
         }
      }
      return nodeList_.size();
   }

   /**
    * Reads one key from every proxy map.
    * NOTE(review): the loop starts at index 1 (unlike _put's 0) yet the
    * full node count is returned as the op count — confirm whether skipping
    * the root node is intentional.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _get(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 1; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            String str = (String) proxyMaps_[i].get(key);
            tx.commit();
         }
         else
         {
            String str = (String) proxyMaps_[i].get(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Removes one key from every proxy map.
    * NOTE(review): starts at index 1, same caveat as _get.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _remove(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 1; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].remove(key);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].remove(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Generates tree node fqn strings quasi-exponentially: depth is the
    * number of levels below the root and children is the fan-out at each
    * node. This structure is used to add, get, and remove for each node.
    */
   private ArrayList nodeGen(int depth, int children)
   {
      ArrayList strList = new ArrayList();
      ArrayList oldList = new ArrayList();
      ArrayList newList = new ArrayList();
      oldList.add("/");
      newList.add("/");
      strList.add("/");
      while (depth > 0)
      {
         // Expand every path of the previous level by one more component.
         newList = new ArrayList();
         for (int i = 0; i < oldList.size(); i++)
         {
            for (int j = 0; j < children; j++)
            {
               String tmp = (String) oldList.get(i);
               tmp += Integer.toString(j);
               if (depth != 1) tmp += "/"; // not a leaf yet
               newList.add(tmp);
            }
         }
         strList.addAll(newList);
         oldList = newList;
         depth--;
      }
      log("Nodes generated: " + strList.size());
      return strList;
   }

   public static Test suite() throws Exception
   {
      return new TestSuite(ReplicatedSyncMapPerfAopTest.class);
   }

   /**
    * Writes a progress/result line to stdout.
    */
   private void log(String str)
   {
      System.out.println(str);
   }
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/ReplicatedAsyncMapPerfAopTest.java
Index: ReplicatedAsyncMapPerfAopTest.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.lock.IsolationLevel;
import org.jboss.cache.lock.LockStrategyFactory;
import org.jboss.cache.transaction.DummyTransactionManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.transaction.UserTransaction;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* Replicated asynchronous (REPL_ASYNC) performance test for PojoCache-managed Maps.
*
* @version $Revision: 1.1 $
* @author<a href="mailto:bwang at jboss.org">Ben Wang</a> May 20 2003
*/
public class ReplicatedAsyncMapPerfAopTest extends TestCase
{
   PojoCache cache_;
   Configuration.CacheMode cachingMode_ = Configuration.CacheMode.LOCAL;
   final static Properties p_;
   String oldFactory_ = null;
   final String FACTORY = "org.jboss.cache.transaction.DummyContextFactory";
   DummyTransactionManager tm_;
   // One aspectized Map proxy per generated node fqn, filled in populateNode().
   Map[] proxyMaps_ = null;
   // Fqn strings produced by nodeGen().
   ArrayList nodeList_;
   static final int depth_ = 3;
   static final int children_ = 4;
   // Minimum character length of each map value string.
   static final int mapValueSize_ = 100;
   static final String seed1_ = "This is a test. ";
   static final String seed2_ = "THAT is a TEST. ";
   StringBuffer originalStrBuf_; // values written during population
   StringBuffer newStrBuf_;      // values written during the timed _put pass

   static
   {
      p_ = new Properties();
      p_.put(Context.INITIAL_CONTEXT_FACTORY, "org.jboss.cache.transaction.DummyContextFactory");
   }

   public ReplicatedAsyncMapPerfAopTest(String name)
   {
      super(name);
   }

   /**
    * Installs the dummy JNDI/transaction environment, starts the cache and
    * populates one Map pojo per generated node before the timed test runs.
    */
   public void setUp() throws Exception
   {
      super.setUp();
      oldFactory_ = System.getProperty(Context.INITIAL_CONTEXT_FACTORY);
      System.setProperty(Context.INITIAL_CONTEXT_FACTORY, FACTORY);
      DummyTransactionManager.getInstance();
      // NOTE(review): the CacheMode argument is only recorded in cachingMode_;
      // the effective mode comes from META-INF/replAsync-service.xml in initCaches().
      initCaches(Configuration.CacheMode.LOCAL);
      tm_ = new DummyTransactionManager();
      originalStrBuf_ = new StringBuffer();
      newStrBuf_ = new StringBuffer();
      generateString();
      // Fix: message previously said "ReplicatedAsyncPerfAopTest", mislabeling
      // this test's output.
      log("ReplicatedAsyncMapPerfAopTest: cacheMode=ReplAsync");
      nodeList_ = nodeGen(depth_, children_);
      populateNode();
   }

   /**
    * Builds the two value strings: originalStrBuf_ from seed1_ and
    * newStrBuf_ from seed2_, each at least mapValueSize_ characters long.
    * Fix: removed the unused locals "length" and "isTrue".
    */
   private void generateString()
   {
      while (originalStrBuf_.length() < mapValueSize_)
      {
         originalStrBuf_.append(seed1_);
         newStrBuf_.append(seed2_);
      }
   }

   /**
    * Attaches a freshly populated HashMap to every generated fqn via
    * putObject and keeps the returned aspectized proxy in proxyMaps_,
    * logging the average put/get time per node.
    */
   private void populateNode() throws Exception
   {
      // Output formatting for the per-op average.
      DecimalFormat form = new DecimalFormat("#.00");
      FieldPosition fieldPos = new FieldPosition(0);
      StringBuffer dumbStr = new StringBuffer();
      proxyMaps_ = new Map[nodeList_.size()];
      long time1 = System.currentTimeMillis();
      int nOps = 0;
      for (int i = 0; i < nodeList_.size(); i++)
      {
         // Put the plain map into the cache first, then fetch the managed proxy.
         Map map = populateMap();
         cache_.putObject((String) nodeList_.get(i), map);
         proxyMaps_[i] = (Map) cache_.getObject((String) nodeList_.get(i));
      }
      nOps = nodeList_.size();
      long time2 = System.currentTimeMillis();
      double d = (double) (time2 - time1) / nOps;
      log("Time elapsed for one putObject and getObject entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
   }

   /**
    * Builds a HashMap with one "<i>aop" key per generated node, all mapped
    * to the original seed string.
    */
   private Map populateMap()
   {
      Map map = new HashMap();
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         String value = originalStrBuf_.toString();
         map.put(key, value);
      }
      return map;
   }

   public void tearDown() throws Exception
   {
      super.tearDown();
      DummyTransactionManager.destroy();
      destroyCaches();
      if (oldFactory_ != null)
      {
         // Restore whatever JNDI factory was configured before setUp().
         System.setProperty(Context.INITIAL_CONTEXT_FACTORY, oldFactory_);
         oldFactory_ = null;
      }
      proxyMaps_ = null;
   }

   /**
    * Creates and starts the cache from the REPL_ASYNC configuration file.
    */
   void initCaches(Configuration.CacheMode caching_mode) throws Exception
   {
      cachingMode_ = caching_mode;
      cache_ = new PojoCache();
      XmlConfigurationParser parser = new XmlConfigurationParser();
      Configuration c = parser.parseFile("META-INF/replAsync-service.xml");
      c.setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
      cache_.setConfiguration(c);
      cache_.start();
   }

   void destroyCaches() throws Exception
   {
      cache_.stop();
      cache_ = null;
   }

   /**
    * Runs the timed put/get/remove passes against the aspectized map
    * proxies (untransacted) and logs the average latency of each pass.
    */
   public void testAll() throws Exception
   {
      log("=== Start ===");
      // Output formatting for the per-op average.
      DecimalFormat form = new DecimalFormat("#.00");
      FieldPosition fieldPos = new FieldPosition(0);
      StringBuffer dumbStr = new StringBuffer();
      boolean hasTx = false;
      // Step 1. Overwrite one entry per proxy map.
      long time1 = System.currentTimeMillis();
      int nOps = 0;
      nOps = _put(hasTx);
      long time2 = System.currentTimeMillis();
      double d = (double) (time2 - time1) / nOps;
      log("Time elapsed for put entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      dumbStr = new StringBuffer();
      // Step 2. Read one entry per proxy map.
      time1 = System.currentTimeMillis();
      nOps = _get(hasTx);
      time2 = System.currentTimeMillis();
      d = (double) (time2 - time1) / nOps;
      log("Time elapsed for get entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      dumbStr = new StringBuffer();
      // Step 3. Remove one entry per proxy map.
      time1 = System.currentTimeMillis();
      nOps = _remove(hasTx);
      time2 = System.currentTimeMillis();
      d = (double) (time2 - time1) / nOps;
      log("Time elapsed for remove entry is " + (time2 - time1) + " with " + nOps
            + " operations. Average per ops is: " + form.format(d, dumbStr, fieldPos) +
            " msec.");
      log("=== End ===");
   }

   protected void setLevelRW()
   {
      log("set lock level to RWUpgrade ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.REPEATABLE_READ);
   }

   protected void setLevelSerial()
   {
      log("set lock level to SimpleLock ...");
      LockStrategyFactory.setIsolationLevel(IsolationLevel.SERIALIZABLE);
   }

   /**
    * Writes the "new" seed value into one key of every proxy map,
    * optionally wrapping each write in its own transaction.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _put(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      String value = newStrBuf_.toString();
      for (int i = 0; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].put(key, value);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].put(key, value);
         }
      }
      return nodeList_.size();
   }

   /**
    * Reads one key from every proxy map.
    * NOTE(review): the loop starts at index 1 (unlike _put's 0) yet the
    * full node count is returned as the op count — confirm whether skipping
    * the root node is intentional.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _get(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 1; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            String str = (String) proxyMaps_[i].get(key);
            tx.commit();
         }
         else
         {
            String str = (String) proxyMaps_[i].get(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Removes one key from every proxy map.
    * NOTE(review): starts at index 1, same caveat as _get.
    *
    * @return the number of nodes (reported as the op count).
    */
   private int _remove(boolean hasTx) throws Exception
   {
      UserTransaction tx = null;
      if (hasTx)
      {
         tx = (UserTransaction) new InitialContext(p_).lookup("UserTransaction");
      }
      for (int i = 1; i < nodeList_.size(); i++)
      {
         String key = Integer.toString(i) + "aop";
         if (hasTx)
         {
            tx.begin();
            proxyMaps_[i].remove(key);
            tx.commit();
         }
         else
         {
            proxyMaps_[i].remove(key);
         }
      }
      return nodeList_.size();
   }

   /**
    * Generates tree node fqn strings quasi-exponentially: depth is the
    * number of levels below the root and children is the fan-out at each
    * node. This structure is used to add, get, and remove for each node.
    */
   private ArrayList nodeGen(int depth, int children)
   {
      ArrayList strList = new ArrayList();
      ArrayList oldList = new ArrayList();
      ArrayList newList = new ArrayList();
      oldList.add("/");
      newList.add("/");
      strList.add("/");
      while (depth > 0)
      {
         // Expand every path of the previous level by one more component.
         newList = new ArrayList();
         for (int i = 0; i < oldList.size(); i++)
         {
            for (int j = 0; j < children; j++)
            {
               String tmp = (String) oldList.get(i);
               tmp += Integer.toString(j);
               if (depth != 1) tmp += "/"; // not a leaf yet
               newList.add(tmp);
            }
         }
         strList.addAll(newList);
         oldList = newList;
         depth--;
      }
      log("Nodes generated: " + strList.size());
      return strList;
   }

   public static Test suite() throws Exception
   {
      return new TestSuite(ReplicatedAsyncMapPerfAopTest.class);
   }

   /**
    * Writes a progress/result line to stdout.
    */
   private void log(String str)
   {
      System.out.println(str);
   }
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/FieldUpdateEmulator.java
Index: FieldUpdateEmulator.java
===================================================================
/*
* JBoss, Home of Professional Open Source
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import EDU.oswego.cs.dl.util.concurrent.CountDown;
import org.jboss.cache.aop.PojoCache;
import org.jboss.cache.data.Address;
import org.jboss.cache.data.Course;
import org.jboss.cache.data.RandomString;
import org.jboss.cache.data.Student;
import org.jboss.cache.factories.XmlConfigurationParser;
import org.jboss.cache.transaction.DummyTransactionManager;
import org.jboss.cache.TreeCache;
import org.jboss.cache.CacheException;
import org.jboss.cache.AbstractCacheListener;
import org.jgroups.View;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Random;
import java.util.StringTokenizer;
import java.util.Vector;
/**
* Use the same configuration as PojoCache but don't do any AOP. Just emulates it.
*/
public class FieldUpdateEmulator
{
static PojoCache cache_;
Properties props_ = new Properties();
static int threads_; // how many threads to send the put.
static int loops_; // how many loops
static int sleepIntervalInMillis_; // sleep interval between request
static boolean randomSleep_ = false;
static int objectListSize_; // what is the list size in the Courses list
final static String ROOT = "/JSESSION/localhost/"; // emulates the http test
static boolean isReady_ = false;
static Throwable ex_;
static long exceptionCounts_ = 0;
static CountDown countdown_;
static boolean receiver_ = false;
static boolean transaction_ = false;
static Random random = new Random(10);
static int updatePojoInterval_ = 0;
static boolean runPojoCache_ = false;
static long startTime_ = 0;
static long endTime_ = 0;
static int operationType_ = 1;
/**
 * Builds the shared PojoCache from the xml file named by the "cache_config"
 * property (the cache is configured here but not started).
 *
 * @throws RuntimeException if the "cache_config" property is missing.
 */
void initCache() throws Exception
{
   cache_ = new PojoCache();
   String configFile = (String) props_.get("cache_config");
   if (configFile == null)
   {
      throw new RuntimeException("Cache config xml is not specified.");
   }
   cache_.setConfiguration(new XmlConfigurationParser().parseFile(configFile));
   cache_.getConfiguration().setTransactionManagerLookupClass("org.jboss.cache.DummyTransactionManagerLookup");
}
/**
 * Stops the shared cache and drops the static reference.
 */
void destroyCache() throws Exception
{
   cache_.stop();
   cache_ = null;
}
/**
 * Returns the shared cache instance (as its TreeCache supertype).
 */
TreeCache getCache()
{
   return cache_;
}
/**
 * Reads "key=value" lines from the given config file into props_. Lines
 * starting with '#' and blank lines are skipped; keys are lower-cased.
 * Fix: the reader is now closed in a finally block so it is not leaked
 * when readLine/parsing throws.
 *
 * @param fileName path of the config file (e.g. bench.txt).
 */
void parseConfig(String fileName) throws Exception
{
   BufferedReader fileReader = new BufferedReader(new FileReader(fileName));
   try
   {
      String line;
      while ((line = fileReader.readLine()) != null)
      {
         if (line.startsWith("#"))
            continue;
         line = line.trim();
         if (line.length() == 0)
            continue;
         StringTokenizer st = new StringTokenizer(line, "=", false);
         String key = st.nextToken().toLowerCase();
         String val = st.nextToken();
         props_.put(key, val);
         System.out.println("Read in config key, value: " + key + " " + val);
      }
   }
   finally
   {
      fileReader.close();
   }
}
/**
 * Reads the required config property or fails fast with a uniform message
 * naming the actual property key (previously some messages referred to the
 * wrong name, e.g. "sleepIntervalInMillis" for "sleep_interval_millis").
 */
private String requireProp(String key)
{
   String val = (String) props_.get(key);
   if (val == null)
      throw new RuntimeException("Can't find " + key + " property");
   return val;
}

/**
 * Parses the load parameters from props_, warms up the cache (coordinator
 * only) and spawns one Loader thread per configured thread. Records the
 * test start time just before the threads launch.
 */
void startLoadTest() throws InterruptedException, CacheException
{
   threads_ = Integer.parseInt(requireProp("threads"));
   loops_ = Integer.parseInt(requireProp("loops"));
   sleepIntervalInMillis_ = Integer.parseInt(requireProp("sleep_interval_millis"));
   randomSleep_ = Boolean.valueOf(requireProp("random_sleep_interval")).booleanValue();
   objectListSize_ = Integer.parseInt(requireProp("object_list_size"));
   transaction_ = Boolean.valueOf(requireProp("transaction")).booleanValue();
   runPojoCache_ = Boolean.valueOf(requireProp("run_pojocache")).booleanValue();
   updatePojoInterval_ = Integer.parseInt(requireProp("update_pojo_interval"));
   countdown_ = new CountDown(threads_);
   // Warm up the cache first to avoid any simultaneous write contention.
   if (cache_.getCoordinator().equals(cache_.getLocalAddress()))
   {
      System.out.println("I am the coordinator: " + cache_.getLocalAddress());
      cache_.put(ROOT + cache_.getLocalAddress().toString(), "test", "test");
   }
   sleep_(300);
   startTime_ = System.currentTimeMillis();
   for (int i = 0; i < threads_; i++)
   {
      Loader loader = new Loader(cache_, i, cache_.getLocalAddress().toString());
      loader.start();
   }
}
/**
 * Returns the per-request sleep interval in milliseconds: 0 when sleeping
 * is disabled, a random value below the configured maximum when random
 * sleeping is on, otherwise the fixed configured interval.
 */
static int getSleepInterval()
{
   if (sleepIntervalInMillis_ == 0)
   {
      return 0;
   }
   return randomSleep_ ? random.nextInt(sleepIntervalInMillis_) : sleepIntervalInMillis_;
}
/**
 * Splits a comma-separated member address string into a list of the
 * individual address tokens.
 */
List getMembers(String addrListStr)
{
   List members = new ArrayList();
   for (StringTokenizer tok = new StringTokenizer(addrListStr, ","); tok.hasMoreTokens();)
   {
      members.add(tok.nextToken());
   }
   return members;
}
/**
 * Sleeps for the given number of milliseconds. Fix: if the sleep is
 * interrupted, the thread's interrupt status is now restored (it was
 * previously swallowed after printing the stack trace).
 */
static void sleep_(long msec)
{
   try
   {
      Thread.sleep(msec);
   }
   catch (InterruptedException e)
   {
      Thread.currentThread().interrupt();
      e.printStackTrace();
   }
}
/**
 * Prints the command-line usage summary to stdout.
 */
void printUsage()
{
   String usage = "Options: -config to specify config file (like bench.txt)\n" +
         " -receiver receiver only mode, e.g., no send\n";
   System.out.println(usage);
}
/**
 * Prints a banner describing the test's load pattern to stdout.
 */
public void printInfo()
{
   String banner = "\nThis is a simple performance test for JBossCache.\n" +
         "The load pattern is like those of http session repl, that is,\neach put " +
         "is under a separate sub-tree.\n" + "As a result, there should not be write contention.\n";
   System.out.println("\n************************************");
   System.out.println(banner);
}
/**
 * Entry point. Parses -config/-receiver arguments, starts the cache,
 * waits until the cluster view contains all configured members (set by
 * ViewListener via the static isReady_ flag), then either drives the load
 * test or idles in receiver mode. The process must be killed manually —
 * the final loop never exits.
 */
public static void main(String[] args) throws Exception
{
   FieldUpdateEmulator server = new FieldUpdateEmulator();
   if (args.length == 0)
   {
      server.printUsage();
      return;
   }
   // Parse command line; -config consumes the following argument.
   for (int i = 0; i < args.length; i++)
   {
      if ("-config".equals(args[i]))
      {
         System.out.println("Configuration file is: " + args[i + 1]);
         server.parseConfig(args[++i]);
      }
      else if ("-receiver".equals(args[i]))
      {
         receiver_ = true;
      }
      else
      {
         System.err.println("Unknown argument:" + args[i]);
         server.printUsage();
         return;
      }
   }
   server.printInfo();
   String addrListStr = (String) server.props_.get("members");
   List addrList = server.getMembers(addrListStr);
   server.initCache();
   // Listener must be registered before start() so no view change is missed.
   ViewListener listener = new ViewListener(addrList);
   cache_.getNotifier().addCacheListener(listener); // register for view change
   cache_.start();
   System.out.println("Cache started .. ");
   System.out.println("Waiting for the other nodes to start up...");
   // Busy-wait until ViewListener observes the full membership.
   while (!isReady_)
   {
      sleep_(100);
   }
   sleep_(2000);
   if (!receiver_)
   {
      server.startLoadTest();
      System.out.println("Waiting for the test to finish...");
      // blocked until all Loader threads release the countdown
      countdown_.acquire();
      endTime_ = System.currentTimeMillis();
      System.out.println("\nThroughtput for this node with:\n" + "threads = " + threads_
            + "\nloops = " + loops_
            + "\nsleep interval = " + sleepIntervalInMillis_ + "\nobject list size = "
            + objectListSize_ + "\ntranasaction? " + transaction_ + "\n"
            + "is: " + (loops_ * threads_ * 1000) / (endTime_ - startTime_) + " requests/sec\n");
   }
   else
   {
      System.out.println("Receiver mode only. Won't send request...");
   }
   // Any Loader exception is recorded in the static ex_ field; rethrow it here.
   if (ex_ != null)
   {
      System.err.println("Exception counts: " + exceptionCounts_);
      throw new RuntimeException("Exception occurred during the run: " + ex_);
   }
   System.out.println("Test is finished. hit ctrl-c to kill the process ...");
   // Keep the node alive so other cluster members can finish their runs.
   while (true)
   {
      sleep_(1000);
   }
   // server.destroyCache();
}
/**
 * Cache listener that flips the shared {@code isReady_} flag once the cluster
 * view contains every member listed in the configuration, and fails fast if
 * more members than configured ever appear.
 */
public static class ViewListener extends AbstractCacheListener
{
   List addrList_; // configured member addresses

   public ViewListener(List addrList)
   {
      addrList_ = addrList;
   }

   public void viewChange(View new_view) // might be MergeView after merging
   {
      Vector members = new_view.getMembers();
      int expected = addrList_.size();
      // More members than configured means the setup is wrong — abort loudly.
      if (members.size() > expected)
      {
         throw new RuntimeException("You have more members in the cluster group than specified in the config."
               + " size of members now: " + members.size());
      }
      // Ideally we would match each address individually; size equality is
      // treated as "everyone has joined".
      if (members.size() == expected)
      {
         isReady_ = true;
      }
   }
}
/**
 * Worker thread that drives repeated cache operations against its own FQN
 * ({@code ROOT + localAddress + "/" + threadId}) for {@code loops_} iterations,
 * then releases the shared {@code countdown_} semaphore so main() can finish.
 *
 * Depending on static configuration it exercises either plain-cache put/get,
 * PojoCache putObject, or PojoCache field updates (no transaction for the
 * field-update case).
 */
public static class Loader extends Thread
{
   int threadId;
   String localAddress;
   PojoCache cache_;
   TransactionManager tm_ = DummyTransactionManager.getInstance();

   public Loader(PojoCache cache, int i, String localAddress)
   {
      cache_ = cache;
      threadId = i;
      this.localAddress = localAddress;
   }

   public void run()
   {
      Object obj = null;
      Object obj1 = Loader.constructObject();
      Object obj2 = Loader.constructObject();
      String fqn = ROOT + localAddress + "/" + threadId;
      long start_time = System.currentTimeMillis();
      for (int i = 0; i < loops_; i++)
      {
         if (getSleepInterval() != 0)
            sleep_(getSleepInterval());
         // BUG FIX: these flags must be reset every iteration. The original
         // declared `transaction` once before the loop, so the first
         // field-update iteration disabled transactions for the entire
         // remainder of the run.
         boolean transaction = true;
         boolean update = true;
         Transaction tx = null;
         try
         {
            if (runPojoCache_ && updatePojoInterval_ != 1)
            {
               if ((i % updatePojoInterval_) != 0)
               {
                  transaction = false; // no transaction for field update.
                  update = false;
               }
            }
            if (transaction)
            {
               tm_.begin();
               tx = tm_.getTransaction();
            }
            /**
             * Need this for PojoCache. Otherwise, it'd be cheating because another
             * putObject of the same pojo is fast.
             */
            if (update)
            {
               // Alternate between the two pre-built pojos.
               obj = (((i + 1) % 2) == 0) ? obj1 : obj2;
            }
            doWork(fqn, localAddress, obj, update);
            if (transaction)
            {
               tx.commit();
            }
         }
         catch (Exception e)
         {
            exceptionCounts_++;
            ex_ = e;
            e.printStackTrace();
            // Guard tx against null: tm_.begin() may have thrown before tx
            // was assigned, in which case there is nothing to roll back.
            if (transaction && tx != null)
            {
               try
               {
                  tx.rollback();
               }
               catch (SystemException e1)
               {
                  e1.printStackTrace();
                  ex_ = e1;
               }
            }
         }
         // Progress indicator from thread 0 only, every 50 loops.
         if ((i % 50) == 0 && threadId == 0)
         {
            System.out.println("Processing at loop: " + i);
         }
      }
      long end_time = System.currentTimeMillis();
      long time = end_time - start_time;
      if (threadId == 0)
         System.out.println("Total time spent (ms) on thread id 0: " + time + " for " + loops_ + " loops");
      countdown_.release();
   }

   /**
    * Dispatches one unit of work: plain-cache put/get, a fresh-pojo
    * putObject, or a field update, according to the static configuration.
    */
   void doWork(String fqn, Object key, Object pojo, boolean update) throws CacheException
   {
      if (!runPojoCache_)
      {
         doPlainCacheWork(fqn, key, pojo);
      }
      else if (update)
      {
         // Fresh pojo each time so putObject does real work (see run()).
         Object obj = Loader.constructObject();
         doPojoCacheWork(fqn, obj);
      }
      else
      {
         doPojoCacheFieldWork(fqn, pojo);
      }
   }

   /**
    * Plain-cache operation: operationType_ == 1 is put/get; anything else
    * does remove/put/get. Verifies the read value round-tripped correctly.
    */
   void doPlainCacheWork(String fqn, Object key, Object pojo) throws CacheException
   {
      if (operationType_ != 1)
      {
         cache_.remove(fqn, key);
      }
      cache_.put(fqn, key, pojo);
      Object obj = cache_.get(fqn, key);
      if (!((Student) obj).getName().equals("Joe"))
      {
         throw new RuntimeException("Value returned not Joe");
      }
   }

   // Fake it: plain put/get stand-in for the real putObject path.
   void doPojoCacheWork(String fqn, Object pojo) throws CacheException
   {
      cache_.put(fqn, "test", pojo);
      cache_.get(fqn, "test");
   }

   // Emulates a field update: one read and one small write.
   void doPojoCacheFieldWork(String fqn, Object pojo) throws CacheException
   {
      Object obj = cache_.get(fqn, "test");
      cache_.put(fqn, "test1", "Pingtung");
   }

   /**
    * Builds a Student ("Joe") with an address and objectListSize_ randomly
    * populated courses, used as the replication payload.
    */
   static Object constructObject()
   {
      Student joe = new Student();
      joe.setName("Joe");
      Address add = new Address();
      add.setZip(94086);
      add.setCity("Sunnyvale"); // fixed stray ')' typo in city name
      add.setStreet("Albertson");
      joe.setAddress(add);
      String str;
      for (int i = 0; i < objectListSize_; i++)
      {
         Course course = new Course();
         str = RandomString.randomstring(10, 20);
         course.setInstructor(str);
         str = RandomString.randomstring(10, 20);
         course.setTitle(str);
         str = RandomString.randomstring(10, 20);
         course.setRoom(str);
         joe.addCourse(course);
      }
      return joe;
   }
}
}
1.1 date: 2006/10/31 08:01:14; author: bwang; state: Exp;JBossCache/old/tests/perf/org/jboss/cache/aop/StudentMetrics.java
Index: StudentMetrics.java
===================================================================
/*
*
* JBoss, the OpenSource J2EE webOS
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jboss.cache.aop;
import junit.framework.TestCase;
import org.jboss.cache.aop.test.Address;
import org.jboss.cache.aop.test.Student;
import org.jboss.cache.config.Configuration;
import org.jboss.cache.factories.XmlConfigurationParser;
import java.util.Hashtable;
import java.util.Random;
/**
 * Driver test for performance...
 * <ul>
 * <li>automatic state fail over</li>
 * <li>fine-grained replication</li>
 * <li>preservation of object graph relationship</li>
 * </ul>
 */
public class StudentMetrics extends TestCase {
   // Fixture data: student name -> Student, populated by init().
   Hashtable hash = new Hashtable();
   // cache1 and cache2 are in the same clustering group.
   private PojoCache cache1_;
   private PojoCache cache2_;
   int TIMES = 1000;

   protected void setUp() throws Exception {
      cache1_ = createCache("TestCluster");
      cache2_ = createCache("TestCluster");
      init();
   }

   protected void tearDown() throws Exception {
      // Clearing cache1 replicates the removal; then stop both instances.
      cache1_.remove("/");
      cache1_.stop();
      cache2_.stop();
   }

   /**
    * Creates and starts a PojoCache configured for synchronous replication
    * under the given cluster name.
    */
   private PojoCache createCache(String name) throws Exception {
      PojoCache cache = new PojoCache();
      // read in the replSync xml. Here we use synchronous mode replication.
      //config.configure(tree, "META-INF/replSync-service.xml");
      XmlConfigurationParser parser = new XmlConfigurationParser();
      Configuration config = parser.parseFile("META-INF/replSync-service.xml");
      config.setClusterName(name);
      cache.setConfiguration(config);
      cache.start(); // kick start the cache
      return cache;
   }

   /**
    * Populate the propagation tree.
    *
    * @throws Exception
    */
   protected void init() throws Exception {
      Random rand = new Random();
      for (int idx = 0; idx < TIMES; idx++) {
         Student student = new Student();
         student.setName("" + idx);
         Address address = new Address();
         address.setStreet(rand.nextInt() + "Oak Drive");
         address.setCity("Pleasantville, CA" + rand.nextInt());
         address.setZip(rand.nextInt(99999));
         student.setAddress(address);
         hash.put(student.getName(), student);
      }
      System.out.println("Finished with init...");
   }

   /**
    * Times raw Hashtable lookups, then times putObject into cache1.
    * (The getObject timing against cache2 is currently disabled.)
    */
   public void testPropagation() throws Exception {
      // Baseline: iterate through all entries without the cache.
      long startMs = System.currentTimeMillis();
      System.out.print("running through hash by itself...");
      for (int idx = 0; idx < TIMES; idx++) {
         Student x = (Student) hash.get("" + idx);
         if (idx % 100 == 0) {
            System.out.println(x);
         }
      }
      System.out.println(TIMES + " hashmap ops took " + (System.currentTimeMillis() - startMs));

      System.out.println("adding to cache 1...");
      // Timed pass: add every student to the replicated cache.
      startMs = System.currentTimeMillis();
      for (int idx = 0; idx < TIMES; idx++) {
         cache1_.putObject("test/" + idx, hash.get("" + idx));
      }
      System.out.println(TIMES + " putObject ops took " + (System.currentTimeMillis() - startMs));

      // Retrieve All from Server #2
      System.out.println("retreiving from cache 2...");
      startMs = System.currentTimeMillis();
      /*
      for(int i = 0; i < TIMES; i++) {
      Student y = (Student)cache2_.getObject("test/"+i);
      if(i % 100 == 0) {
      System.out.println(y);
      }
      }
      System.out.println(TIMES +" getObject ops took "+(System.currentTimeMillis() - ms));
      */
   }

   public static void main(String[] args) throws Exception {
      StudentMetrics test = new StudentMetrics();
      test.setUp();
      test.testPropagation();
      test.tearDown();
   }
}
More information about the jboss-cvs-commits
mailing list