rhmessaging commits: r2887 - mgmt/trunk/cumin/python/cumin.
by rhmessaging-commits@lists.jboss.org
Author: eallen
Date: 2008-11-26 11:06:25 -0500 (Wed, 26 Nov 2008)
New Revision: 2887
Modified:
mgmt/trunk/cumin/python/cumin/model.py
Log:
Move "See Other Pools" link to top
Modified: mgmt/trunk/cumin/python/cumin/model.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/model.py 2008-11-26 15:51:20 UTC (rev 2886)
+++ mgmt/trunk/cumin/python/cumin/model.py 2008-11-26 16:06:25 UTC (rev 2887)
@@ -1874,15 +1874,15 @@
def __init__(self, model):
super(CuminPool, self).__init__(model, "pool", Pool)
- prop = CuminProperty(self, "id")
- prop.title = "Collector ID"
- prop.summary = True
-
prop = self.OtherPools(self, "others")
prop.title = "See Other Pools"
prop.escape = False
prop.summary = True
+ prop = CuminProperty(self, "id")
+ prop.title = "Collector ID"
+ prop.summary = True
+
stat = self.PercentStat(self, "Running")
stat.title = "Running Jobs"
16 years, 1 month
rhmessaging commits: r2886 - mgmt/trunk/cumin/python/cumin.
by rhmessaging-commits@lists.jboss.org
Author: eallen
Date: 2008-11-26 10:51:20 -0500 (Wed, 26 Nov 2008)
New Revision: 2886
Modified:
mgmt/trunk/cumin/python/cumin/model.py
Log:
Added "See Other Pools" link to Pool view
Modified: mgmt/trunk/cumin/python/cumin/model.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/model.py 2008-11-26 14:04:03 UTC (rev 2885)
+++ mgmt/trunk/cumin/python/cumin/model.py 2008-11-26 15:51:20 UTC (rev 2886)
@@ -1,17 +1,19 @@
-import logging
+from datetime import datetime, timedelta
+from formats import *
+from job import *
from mint.schema import *
+from parameters import *
+from pool import PoolSlotSet, PoolMachineSet
+from struct import unpack, calcsize
+from system import SystemSlotSet
+from time import *
+from types import *
+from util import *
from wooly import *
from wooly.parameters import *
from wooly.widgets import *
-from time import *
-from datetime import datetime, timedelta
-from types import *
-from struct import unpack, calcsize
+import logging
-from util import *
-from formats import *
-from parameters import *
-from job import *
log = logging.getLogger("cumin.model")
@@ -612,7 +614,6 @@
def get_colors(self):
return self.load_colors
-from system import SystemSlotSet
class CuminSystem(RemoteClass):
def __init__(self, model):
super(CuminSystem, self).__init__(model, "system", System, SystemStats)
@@ -1868,8 +1869,6 @@
return Pool(coll)
get = classmethod(get)
-from job import JobSet
-from pool import PoolSlotSet, PoolMachineSet
class CuminPool(CuminClass):
def __init__(self, model):
@@ -1878,6 +1877,11 @@
prop = CuminProperty(self, "id")
prop.title = "Collector ID"
prop.summary = True
+
+ prop = self.OtherPools(self, "others")
+ prop.title = "See Other Pools"
+ prop.escape = False
+ prop.summary = True
stat = self.PercentStat(self, "Running")
stat.title = "Running Jobs"
@@ -1917,6 +1921,21 @@
return title
+ class OtherPools(CuminProperty):
+ def get_title(self, session):
+ return ""
+
+ def value(self, session, object):
+ branch = session.branch()
+ self.model.frame.view.show(branch)
+ href = branch.marshal()
+
+ html = """
+ <ul class="ActionSet">
+ <li><a href="%s">%s</a></li>
+ </ul>"""
+ return html % (href, self.title)
+
class PercentStat(CuminStat):
def value_text(self, pool):
state = self.name
16 years, 1 month
rhmessaging commits: r2885 - mgmt/trunk/cumin/python/cumin.
by rhmessaging-commits@lists.jboss.org
Author: eallen
Date: 2008-11-26 09:04:03 -0500 (Wed, 26 Nov 2008)
New Revision: 2885
Modified:
mgmt/trunk/cumin/python/cumin/system.py
Log:
Use full broker URL in services list
Modified: mgmt/trunk/cumin/python/cumin/system.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/system.py 2008-11-25 22:52:48 UTC (rev 2884)
+++ mgmt/trunk/cumin/python/cumin/system.py 2008-11-26 14:04:03 UTC (rev 2885)
@@ -181,7 +181,7 @@
reg = Identifiable(item.id)
self.page.main.set_messaging_tab(session)
href = self.page.main.broker.get_href(session, reg)
- return fmt_link(href, fmt_shorten(item.url))
+ return fmt_link(href, item.url)
else:
pool = model.Pool.get(item.Pool)
self.page.main.pool.set_object(session, pool)
16 years, 1 month
rhmessaging commits: r2884 - mgmt/trunk.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 17:52:48 -0500 (Tue, 25 Nov 2008)
New Revision: 2884
Modified:
mgmt/trunk/README
Log:
Update the README
Modified: mgmt/trunk/README
===================================================================
--- mgmt/trunk/README 2008-11-25 22:51:10 UTC (rev 2883)
+++ mgmt/trunk/README 2008-11-25 22:52:48 UTC (rev 2884)
@@ -37,6 +37,7 @@
$ cd ~/lib/python
$ ln -s ~/pyqpid/qpid
$ ln -s ~/pyqpid/mllib
+ $ ln -s ~/pyqpid/qmf
*Alternatively*, install python-qpid:
@@ -48,7 +49,7 @@
Change to the mgmt directory (the one containing this README file),
and source the devel environment settings:
- $ cd mgmt
+ $ cd mgmt
$ source etc/devel.profile # Or use etc/devel.profile.tcsh
Check that everything is set up properly:
@@ -73,7 +74,7 @@
Edit postgresql permissions:
$ vi /var/lib/pgsql/data/pg_hba.conf
-
+
[Add the following line, *before* the other similar lines]
host cumin cumin 127.0.0.1/32 trust
@@ -81,7 +82,7 @@
Alternative postgresql permissions:
$ vi /var/lib/pgsql/data/pg_hba.conf
-
+
[Add the following line, *before* the other similar lines]
host cumin cumin 127.0.0.1/32 ident cumin
@@ -132,8 +133,8 @@
Add a cumin user:
$ cumin-admin add-user guest
- Set password: # Enter a password for guest
- Retype password: # Confirm said password
+ Enter new password: # Enter a password for guest
+ Confirm new password: # Re-type said password
User 'guest' is added
16 years, 1 month
rhmessaging commits: r2883 - in mgmt/trunk/mint: sql and 1 other directory.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 17:51:10 -0500 (Tue, 25 Nov 2008)
New Revision: 2883
Modified:
mgmt/trunk/mint/python/mint/schema.py
mgmt/trunk/mint/python/mint/schemaparser.py
mgmt/trunk/mint/python/mint/sql.py
mgmt/trunk/mint/python/mint/update.py
mgmt/trunk/mint/sql/schema.sql
Log:
Schema renames. A reschema is required after this change.
Modified: mgmt/trunk/mint/python/mint/schema.py
===================================================================
--- mgmt/trunk/mint/python/mint/schema.py 2008-11-25 21:09:24 UTC (rev 2882)
+++ mgmt/trunk/mint/python/mint/schema.py 2008-11-25 22:51:10 UTC (rev 2883)
@@ -14,10 +14,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -135,10 +135,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -179,9 +179,9 @@
actualArgs = list()
if JobAd is not None:
actualArgs.append(JobAd)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "GetAd",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "GetAd",
+ callback, args=actualArgs)
def SetAttribute(self, model, callback, Name, Value):
actualArgs = list()
@@ -189,33 +189,33 @@
actualArgs.append(Name)
if Value is not None:
actualArgs.append(Value)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "SetAttribute",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "SetAttribute",
+ callback, args=actualArgs)
def Hold(self, model, callback, Reason):
actualArgs = list()
if Reason is not None:
actualArgs.append(Reason)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Hold",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Hold",
+ callback, args=actualArgs)
def Release(self, model, callback, Reason):
actualArgs = list()
if Reason is not None:
actualArgs.append(Reason)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Release",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Release",
+ callback, args=actualArgs)
def Remove(self, model, callback, Reason):
actualArgs = list()
if Reason is not None:
actualArgs.append(Reason)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Remove",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Remove",
+ callback, args=actualArgs)
def Fetch(self, model, callback, File, Start, End, Data):
actualArgs = list()
@@ -227,9 +227,9 @@
actualArgs.append(End)
if Data is not None:
actualArgs.append(Data)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Fetch",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Fetch",
+ callback, args=actualArgs)
class JobStats(SQLObject):
class sqlmeta:
@@ -247,10 +247,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -300,10 +300,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -336,10 +336,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -361,9 +361,9 @@
actualArgs = list()
if Limits is not None:
actualArgs.append(Limits)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "GetLimits",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "GetLimits",
+ callback, args=actualArgs)
def SetLimit(self, model, callback, Name, Max):
actualArgs = list()
@@ -371,15 +371,15 @@
actualArgs.append(Name)
if Max is not None:
actualArgs.append(Max)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "SetLimit",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "SetLimit",
+ callback, args=actualArgs)
def Reconfig(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Reconfig",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Reconfig",
+ callback, args=actualArgs)
class NegotiatorStats(SQLObject):
class sqlmeta:
@@ -404,10 +404,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -438,10 +438,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -465,17 +465,17 @@
actualArgs = list()
if Subsystem is not None:
actualArgs.append(Subsystem)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Start",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Start",
+ callback, args=actualArgs)
def Stop(self, model, callback, Subsystem):
actualArgs = list()
if Subsystem is not None:
actualArgs.append(Subsystem)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "Stop",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "Stop",
+ callback, args=actualArgs)
class MasterStats(SQLObject):
class sqlmeta:
@@ -500,10 +500,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -520,9 +520,9 @@
def reloadACLFile(self, model, callback):
"""Reload the ACL file"""
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "reloadACLFile",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "reloadACLFile",
+ callback, args=actualArgs)
class AclStats(SQLObject):
class sqlmeta:
@@ -541,10 +541,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -562,15 +562,15 @@
def stopClusterNode(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "stopClusterNode",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "stopClusterNode",
+ callback, args=actualArgs)
def stopFullCluster(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "stopFullCluster",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "stopFullCluster",
+ callback, args=actualArgs)
class ClusterStats(SQLObject):
class sqlmeta:
@@ -588,10 +588,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -636,10 +636,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -664,9 +664,9 @@
actualArgs = list()
if by is not None:
actualArgs.append(by)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "expand",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "expand",
+ callback, args=actualArgs)
class JournalStats(SQLObject):
class sqlmeta:
@@ -712,10 +712,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -746,10 +746,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -774,9 +774,9 @@
actualArgs.append(sequence)
if body is not None:
actualArgs.append(body)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "echo",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "echo",
+ callback, args=actualArgs)
def connect(self, model, callback, host, port, durable, authMechanism, username, password, transport):
"""Establish a connection to another broker"""
@@ -795,9 +795,9 @@
actualArgs.append(password)
if transport is not None:
actualArgs.append(transport)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "connect",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "connect",
+ callback, args=actualArgs)
def queueMoveMessages(self, model, callback, srcQueue, destQueue, qty):
"""Move messages from one queue to another"""
@@ -808,9 +808,9 @@
actualArgs.append(destQueue)
if qty is not None:
actualArgs.append(qty)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "queueMoveMessages",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "queueMoveMessages",
+ callback, args=actualArgs)
class BrokerStats(SQLObject):
class sqlmeta:
@@ -828,10 +828,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -862,10 +862,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -893,10 +893,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -916,9 +916,9 @@
actualArgs = list()
if request is not None:
actualArgs.append(request)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "purge",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "purge",
+ callback, args=actualArgs)
class QueueStats(SQLObject):
class sqlmeta:
@@ -963,10 +963,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1008,10 +1008,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1042,10 +1042,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1062,9 +1062,9 @@
def close(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "close",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "close",
+ callback, args=actualArgs)
class ClientConnectionStats(SQLObject):
class sqlmeta:
@@ -1087,10 +1087,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1106,9 +1106,9 @@
def close(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "close",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "close",
+ callback, args=actualArgs)
def bridge(self, model, callback, durable, src, dest, key, tag, excludes, srcIsQueue, srcIsLocal, dynamic):
"""Bridge messages over the link"""
@@ -1131,9 +1131,9 @@
actualArgs.append(srcIsLocal)
if dynamic is not None:
actualArgs.append(dynamic)
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "bridge",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "bridge",
+ callback, args=actualArgs)
class LinkStats(SQLObject):
class sqlmeta:
@@ -1153,10 +1153,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1178,9 +1178,9 @@
def close(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "close",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "close",
+ callback, args=actualArgs)
class BridgeStats(SQLObject):
class sqlmeta:
@@ -1198,10 +1198,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1219,27 +1219,27 @@
def solicitAck(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "solicitAck",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "solicitAck",
+ callback, args=actualArgs)
def detach(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "detach",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "detach",
+ callback, args=actualArgs)
def resetLifespan(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "resetLifespan",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "resetLifespan",
+ callback, args=actualArgs)
def close(self, model, callback):
actualArgs = list()
- originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)
- model.callMethod(self.managedBroker, originalId, self.qmfClassKey, "close",
- callback, args=actualArgs)
+ originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)
+ model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, "close",
+ callback, args=actualArgs)
class SessionStats(SQLObject):
class sqlmeta:
@@ -1262,10 +1262,10 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
- sourceScopeId = BigIntCol(default=None)
- sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
+ qmfBrokerId = StringCol(length=1000, default=None)
+ qmfScopeId = BigIntCol(default=None)
+ qmfObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
Modified: mgmt/trunk/mint/python/mint/schemaparser.py
===================================================================
--- mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-25 21:09:24 UTC (rev 2882)
+++ mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-25 22:51:10 UTC (rev 2883)
@@ -92,7 +92,7 @@
self.pythonOutput += " lazyUpdate = %s\n\n" % (lazyUpdate)
def generateSourceIdsIndex(self, className):
- self.pythonOutput += " source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)\n"
+ self.pythonOutput += " source_ids_unique = DatabaseIndex(qmfBrokerId, qmfScopeId, qmfObjectId, unique=True)\n"
def generateClassAttribs(self, schemaName, elements):
if (schemaName == "JournalStats"):
@@ -143,9 +143,9 @@
self.generateForeignKeyAttrib(colPythonName[0].lower() + colPythonName[1:], keyPythonName)
self.generateMultipleJoin(origPythonName, pythonName, "stats")
else:
- self.generateAttrib("managedBroker", "StringCol", "length=1000")
- self.generateAttrib("sourceScopeId", "BigIntCol")
- self.generateAttrib("sourceObjectId", "BigIntCol")
+ self.generateAttrib("qmfBrokerId", "StringCol", "length=1000")
+ self.generateAttrib("qmfScopeId", "BigIntCol")
+ self.generateAttrib("qmfObjectId", "BigIntCol")
self.generateSourceIdsIndex(pythonName)
self.generateAttrib("qmfClassKey", "StringCol", "length=1000")
self.generateTimestampAttrib("creation")
@@ -175,9 +175,9 @@
self.pythonOutput += "\n def %s(self, model, callback%s):\n" % (elem["@name"], formalArgs)
self.pythonOutput += comment
self.pythonOutput += actualArgs
- self.pythonOutput += " originalId = ObjectId(None, self.sourceScopeId, self.sourceObjectId)\n"
- self.pythonOutput += " model.callMethod(self.managedBroker, originalId, self.qmfClassKey, \"%s\",\n" % elem["@name"]
- self.pythonOutput += " callback, args=actualArgs)\n"
+ self.pythonOutput += " originalId = ObjectId(None, self.qmfScopeId, self.qmfObjectId)\n"
+ self.pythonOutput += " model.callMethod(self.qmfBrokerId, originalId, self.qmfClassKey, \"%s\",\n" % elem["@name"]
+ self.pythonOutput += " callback, args=actualArgs)\n"
def endClass(self):
if (self.additionalPythonOutput != ""):
Modified: mgmt/trunk/mint/python/mint/sql.py
===================================================================
--- mgmt/trunk/mint/python/mint/sql.py 2008-11-25 21:09:24 UTC (rev 2882)
+++ mgmt/trunk/mint/python/mint/sql.py 2008-11-25 22:51:10 UTC (rev 2883)
@@ -70,9 +70,9 @@
return """
select id from %s
- where source_scope_id = %%(sourceScopeId)s
- and source_object_id = %%(sourceObjectId)s
- and managed_broker = %%(managedBroker)s
+ where qmf_scope_id = %%(qmfScopeId)s
+ and qmf_object_id = %%(qmfObjectId)s
+ and qmf_broker_id = %%(qmfBrokerId)s
""" % table
class SqlSetStatsRefs(SqlOperation):
Modified: mgmt/trunk/mint/python/mint/update.py
===================================================================
--- mgmt/trunk/mint/python/mint/update.py 2008-11-25 21:09:24 UTC (rev 2882)
+++ mgmt/trunk/mint/python/mint/update.py 2008-11-25 22:51:10 UTC (rev 2883)
@@ -200,10 +200,10 @@
log.debug("%s(%s) marked deleted", cls.__name__, oid)
- attrs["sourceScopeId"] = oid.first
- attrs["sourceObjectId"] = oid.second
+ attrs["qmfScopeId"] = oid.first
+ attrs["qmfObjectId"] = oid.second
attrs["qmfClassKey"] = str(self.object.getClassKey())
- attrs["managedBroker"] = self.broker.qmfId
+ attrs["qmfBrokerId"] = self.broker.qmfId
cursor = conn.cursor()
Modified: mgmt/trunk/mint/sql/schema.sql
===================================================================
--- mgmt/trunk/mint/sql/schema.sql 2008-11-25 21:09:24 UTC (rev 2882)
+++ mgmt/trunk/mint/sql/schema.sql 2008-11-25 22:51:10 UTC (rev 2883)
@@ -65,9 +65,9 @@
CREATE TABLE acl (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -79,7 +79,7 @@
transfer_acl BOOL,
last_acl_load TIMESTAMP
);
-CREATE UNIQUE INDEX acl_source_ids_unique ON acl (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX acl_source_ids_unique ON acl (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE acl_stats (
id SERIAL PRIMARY KEY,
@@ -91,9 +91,9 @@
CREATE TABLE agent (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -106,7 +106,7 @@
broker_bank BIGINT,
agent_bank BIGINT
);
-CREATE UNIQUE INDEX agent_source_ids_unique ON agent (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX agent_source_ids_unique ON agent (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE agent_stats (
id SERIAL PRIMARY KEY,
@@ -117,9 +117,9 @@
CREATE TABLE binding (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -131,7 +131,7 @@
arguments BYTEA,
origin VARCHAR(1000)
);
-CREATE UNIQUE INDEX binding_source_ids_unique ON binding (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX binding_source_ids_unique ON binding (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE binding_stats (
id SERIAL PRIMARY KEY,
@@ -143,9 +143,9 @@
CREATE TABLE bridge (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -163,7 +163,7 @@
excludes VARCHAR(1000),
dynamic BOOL
);
-CREATE UNIQUE INDEX bridge_source_ids_unique ON bridge (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX bridge_source_ids_unique ON bridge (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE bridge_stats (
id SERIAL PRIMARY KEY,
@@ -174,9 +174,9 @@
CREATE TABLE broker (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -193,7 +193,7 @@
data_dir VARCHAR(1000),
registration_id INT
);
-CREATE UNIQUE INDEX broker_source_ids_unique ON broker (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX broker_source_ids_unique ON broker (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE broker_stats (
id SERIAL PRIMARY KEY,
@@ -204,9 +204,9 @@
CREATE TABLE client_connection (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -219,7 +219,7 @@
federation_link BOOL,
auth_identity VARCHAR(1000)
);
-CREATE UNIQUE INDEX client_connection_source_ids_unique ON client_connection (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX client_connection_source_ids_unique ON client_connection (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE client_connection_stats (
id SERIAL PRIMARY KEY,
@@ -235,9 +235,9 @@
CREATE TABLE cluster (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -251,7 +251,7 @@
status VARCHAR(1000),
members VARCHAR(4000)
);
-CREATE UNIQUE INDEX cluster_source_ids_unique ON cluster (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX cluster_source_ids_unique ON cluster (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE cluster_stats (
id SERIAL PRIMARY KEY,
@@ -262,9 +262,9 @@
CREATE TABLE collector (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -277,7 +277,7 @@
name VARCHAR(1000),
public_network_ip_addr VARCHAR(1000)
);
-CREATE UNIQUE INDEX collector_source_ids_unique ON collector (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX collector_source_ids_unique ON collector (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE collector_stats (
id SERIAL PRIMARY KEY,
@@ -288,9 +288,9 @@
CREATE TABLE exchange (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -302,7 +302,7 @@
durable BOOL,
arguments BYTEA
);
-CREATE UNIQUE INDEX exchange_source_ids_unique ON exchange (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX exchange_source_ids_unique ON exchange (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE exchange_stats (
id SERIAL PRIMARY KEY,
@@ -325,9 +325,9 @@
CREATE TABLE job (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -362,7 +362,7 @@
dag_man_job_id BIGINT,
ad BYTEA
);
-CREATE UNIQUE INDEX job_source_ids_unique ON job (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX job_source_ids_unique ON job (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE job_stats (
id SERIAL PRIMARY KEY,
@@ -373,9 +373,9 @@
CREATE TABLE journal (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -393,7 +393,7 @@
data_file_size BIGINT,
current_file_count BIGINT
);
-CREATE UNIQUE INDEX journal_source_ids_unique ON journal (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX journal_source_ids_unique ON journal (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE journal_stats (
id SERIAL PRIMARY KEY,
@@ -432,9 +432,9 @@
CREATE TABLE link (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -446,7 +446,7 @@
transport VARCHAR(1000),
durable BOOL
);
-CREATE UNIQUE INDEX link_source_ids_unique ON link (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX link_source_ids_unique ON link (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE link_stats (
id SERIAL PRIMARY KEY,
@@ -459,9 +459,9 @@
CREATE TABLE master (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -478,7 +478,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX master_source_ids_unique ON master (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX master_source_ids_unique ON master (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE master_stats (
id SERIAL PRIMARY KEY,
@@ -495,9 +495,9 @@
CREATE TABLE negotiator (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -512,7 +512,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX negotiator_source_ids_unique ON negotiator (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX negotiator_source_ids_unique ON negotiator (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE negotiator_stats (
id SERIAL PRIMARY KEY,
@@ -534,9 +534,9 @@
CREATE TABLE queue (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -549,7 +549,7 @@
exclusive BOOL,
arguments BYTEA
);
-CREATE UNIQUE INDEX queue_source_ids_unique ON queue (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX queue_source_ids_unique ON queue (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE queue_stats (
id SERIAL PRIMARY KEY,
@@ -587,9 +587,9 @@
CREATE TABLE scheduler (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -607,7 +607,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX scheduler_source_ids_unique ON scheduler (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX scheduler_source_ids_unique ON scheduler (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE scheduler_stats (
id SERIAL PRIMARY KEY,
@@ -630,9 +630,9 @@
CREATE TABLE session (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -646,7 +646,7 @@
attached BOOL,
expire_time TIMESTAMP
);
-CREATE UNIQUE INDEX session_source_ids_unique ON session (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX session_source_ids_unique ON session (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE session_stats (
id SERIAL PRIMARY KEY,
@@ -662,9 +662,9 @@
CREATE TABLE slot (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -724,7 +724,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX slot_source_ids_unique ON slot (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX slot_source_ids_unique ON slot (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE slot_stats (
id SERIAL PRIMARY KEY,
@@ -773,9 +773,9 @@
CREATE TABLE store (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -793,7 +793,7 @@
tpl_data_file_size BIGINT,
tpl_current_file_count BIGINT
);
-CREATE UNIQUE INDEX store_source_ids_unique ON store (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX store_source_ids_unique ON store (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE store_stats (
id SERIAL PRIMARY KEY,
@@ -813,9 +813,9 @@
CREATE TABLE submitter (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -827,7 +827,7 @@
name VARCHAR(1000),
schedd_name VARCHAR(1000)
);
-CREATE UNIQUE INDEX submitter_source_ids_unique ON submitter (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX submitter_source_ids_unique ON submitter (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE submitter_stats (
id SERIAL PRIMARY KEY,
@@ -841,9 +841,9 @@
CREATE TABLE sysimage (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -858,7 +858,7 @@
mem_total BIGINT,
swap_total BIGINT
);
-CREATE UNIQUE INDEX sysimage_source_ids_unique ON sysimage (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX sysimage_source_ids_unique ON sysimage (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE sysimage_stats (
id SERIAL PRIMARY KEY,
@@ -876,9 +876,9 @@
CREATE TABLE system (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -891,7 +891,7 @@
version VARCHAR(1000),
machine VARCHAR(1000)
);
-CREATE UNIQUE INDEX system_source_ids_unique ON system (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX system_source_ids_unique ON system (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE system_stats (
id SERIAL PRIMARY KEY,
@@ -902,9 +902,9 @@
CREATE TABLE vhost (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
- managed_broker VARCHAR(1000),
- source_scope_id BIGINT,
- source_object_id BIGINT,
+ qmf_broker_id VARCHAR(1000),
+ qmf_scope_id BIGINT,
+ qmf_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
@@ -914,7 +914,7 @@
name VARCHAR(1000),
federation_tag VARCHAR(1000)
);
-CREATE UNIQUE INDEX vhost_source_ids_unique ON vhost (managed_broker, source_scope_id, source_object_id);
+CREATE UNIQUE INDEX vhost_source_ids_unique ON vhost (qmf_broker_id, qmf_scope_id, qmf_object_id);
CREATE TABLE vhost_stats (
id SERIAL PRIMARY KEY,
16 years, 1 month
rhmessaging commits: r2882 - mgmt/trunk/mint/python/mint.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 16:09:24 -0500 (Tue, 25 Nov 2008)
New Revision: 2882
Modified:
mgmt/trunk/mint/python/mint/__init__.py
Log:
Add getFullUrl to broker proxy
Modified: mgmt/trunk/mint/python/mint/__init__.py
===================================================================
--- mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 20:22:08 UTC (rev 2881)
+++ mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 21:09:24 UTC (rev 2882)
@@ -306,6 +306,9 @@
def getAmqpSession(self):
return self.qmfBroker.getAmqpSession()
+ def getFullUrl(self):
+ return self.qmfBroker.getFullUrl()
+
class MintModel(qmf.console.Console):
staticInstance = None
16 years, 1 month
rhmessaging commits: r2881 - mgmt/trunk/cumin/python/cumin.
by rhmessaging-commits@lists.jboss.org
Author: eallen
Date: 2008-11-25 15:22:08 -0500 (Tue, 25 Nov 2008)
New Revision: 2881
Modified:
mgmt/trunk/cumin/python/cumin/page.py
mgmt/trunk/cumin/python/cumin/pool.py
mgmt/trunk/cumin/python/cumin/system.py
mgmt/trunk/cumin/python/cumin/system.strings
Log:
Added a Services tab under Systems to link to any daemons (and broker) under that system.
Modified: mgmt/trunk/cumin/python/cumin/page.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/page.py 2008-11-25 20:02:24 UTC (rev 2880)
+++ mgmt/trunk/cumin/python/cumin/page.py 2008-11-25 20:22:08 UTC (rev 2881)
@@ -118,9 +118,15 @@
def show_grid_tab(self, session):
self.__tabs.set_grid_tab(session)
+ self.view.set_grid_mode(session)
def show_system_tab(self, session):
self.__tabs.set_system_tab(session)
+ self.view.set_systems_mode(session)
+
+ def set_messaging_tab(self, session):
+ self.__tabs.set_messaging_tab(session)
+ self.view.set_messaging_mode(session)
class MainFrameTabs(LinkSet):
def __init__(self, app, name):
@@ -144,6 +150,9 @@
def set_system_tab(self, session):
self.selection.set(session, "stab")
+ def set_messaging_tab(self, session):
+ self.selection.set(session, "mtab")
+
class Tab(Link):
def render_class(self, session):
return (self.parent.selection.get(session) == self.name) \
@@ -202,6 +211,15 @@
self.systems = SystemsView(app, "sys")
self.add_mode(self.systems)
+ def set_messaging_mode(self, session):
+ self.set_selected_mode(session, self.messaging)
+
+ def set_grid_mode(self, session):
+ self.set_selected_mode(session, self.grid)
+
+ def set_systems_mode(self, session):
+ self.set_selected_mode(session, self.systems)
+
class HomeView(TabbedModeSet):
def __init__(self, app, name):
super(HomeView, self).__init__(app, name)
Modified: mgmt/trunk/cumin/python/cumin/pool.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/pool.py 2008-11-25 20:02:24 UTC (rev 2880)
+++ mgmt/trunk/cumin/python/cumin/pool.py 2008-11-25 20:22:08 UTC (rev 2881)
@@ -173,17 +173,17 @@
jobs = JobsAndGroupsTab(app, "jobs")
self.__tabs.add_tab(jobs)
- scheds = PoolSchedulerSet(app, "scheds")
- self.__tabs.add_tab(scheds)
+ self.scheds = PoolSchedulerSet(app, "scheds")
+ self.__tabs.add_tab(self.scheds)
subs = PoolSubmitterSet(app, "subs")
self.__tabs.add_tab(subs)
- colls = PoolCollectorSet(app, "colls")
- self.__tabs.add_tab(colls)
+ self.colls = PoolCollectorSet(app, "colls")
+ self.__tabs.add_tab(self.colls)
- negs = PoolNegotiatorSet(app, "negs")
- self.__tabs.add_tab(negs)
+ self.negs = PoolNegotiatorSet(app, "negs")
+ self.__tabs.add_tab(self.negs)
limits = self.LimitsTab(app, "limits")
self.__tabs.add_tab(limits)
@@ -195,6 +195,15 @@
data = "model.xml?class=pool;id=%s" % pool.id
return "wooly.setIntervalUpdate('%s', updatePool, 3000)" % data
+ def set_collector_tab(self, session):
+ self.__tabs.set_selected_mode(session, self.colls)
+
+ def set_negotiator_tab(self, session):
+ self.__tabs.set_selected_mode(session, self.negs)
+
+ def set_scheduler_tab(self, session):
+ self.__tabs.set_selected_mode(session, self.scheds)
+
class PoolSchedulerSet(SchedulerSet):
def get_args(self, session):
return self.frame.get_args(session)
Modified: mgmt/trunk/cumin/python/cumin/system.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/system.py 2008-11-25 20:02:24 UTC (rev 2880)
+++ mgmt/trunk/cumin/python/cumin/system.py 2008-11-25 20:22:08 UTC (rev 2881)
@@ -114,6 +114,7 @@
self.__tabs.add_tab(SystemStats(app, "stats"))
self.__tabs.add_tab(SystemJobSet(app, "jobs"))
self.__tabs.add_tab(SystemSlotSet(app, "slots"))
+ self.__tabs.add_tab(SystemServices(app, "services"))
self.__tabs.add_tab(CuminDetails(app, "details"))
from job import JobTab
@@ -149,3 +150,66 @@
def get_sql_values(self, session, system):
return {"nodeName": system.nodeName}
+
+class SystemServices(ItemSet):
+ def render_title(self, session, *args):
+ return "Services"
+
+ def get_args(self, session):
+ return (self.frame.get_object(session),)
+
+ def do_get_items(self, session, system):
+ daemons = list()
+ daemon_types = [Scheduler, Collector, Negotiator]
+ sql = "system = '%s'" % system.nodeName
+
+ for daemon in daemon_types:
+ system_daemon = daemon.select(sql)
+ try:
+ daemons.append(system_daemon[0])
+ except Exception, e:
+ pass
+
+ brokers = Broker.select("system_id = '%i'" % system.id)
+ for broker in brokers:
+ daemons.append(BrokerRegistration.get(broker.registrationID))
+
+ return daemons
+
+ def render_item_content(self, session, item):
+ if isinstance(item, BrokerRegistration):
+ reg = Identifiable(item.id)
+ self.page.main.set_messaging_tab(session)
+ href = self.page.main.broker.get_href(session, reg)
+ return fmt_link(href, fmt_shorten(item.url))
+ else:
+ pool = model.Pool.get(item.Pool)
+ self.page.main.pool.set_object(session, pool)
+ self.page.main.show_grid_tab(session)
+ daemon = Identifiable(item.id)
+ if isinstance(item, Collector):
+ self.page.main.pool.view.set_collector_tab(session)
+ href = self.page.main.pool.collector.get_href(session, daemon)
+ elif isinstance(item, Scheduler):
+ self.page.main.pool.view.set_scheduler_tab(session)
+ href = self.page.main.pool.scheduler.get_href(session, daemon)
+ elif isinstance(item, Negotiator):
+ self.page.main.pool.view.set_negotiator_tab(session)
+ href = self.page.main.pool.negotiator.get_href(session, daemon)
+ return fmt_link(href, item.Name)
+
+ def render_item_type(self, session, item):
+ if isinstance(item, Collector):
+ return "Collector"
+ elif isinstance(item, Scheduler):
+ return "Scheduler"
+ elif isinstance(item, Negotiator):
+ return "Negotiator"
+ elif isinstance(item, BrokerRegistration):
+ return "Broker"
+ else:
+ return "Daemon"
+
+
+
+
\ No newline at end of file
Modified: mgmt/trunk/cumin/python/cumin/system.strings
===================================================================
--- mgmt/trunk/cumin/python/cumin/system.strings 2008-11-25 20:02:24 UTC (rev 2880)
+++ mgmt/trunk/cumin/python/cumin/system.strings 2008-11-25 20:22:08 UTC (rev 2881)
@@ -94,3 +94,13 @@
</table>
<div>{hidden_inputs}</div>
</form>
+
+
+[SystemServices.html]
+<h2>Services</h2>
+<table class="PropertySet">
+ <tbody>{items}</tbody>
+</table>
+
+[SystemServices.item_html]
+<tr><th>{item_type}</th><td>{item_content}</td></tr>
16 years, 1 month
rhmessaging commits: r2880 - in mgmt/trunk: mint/python/mint and 1 other directory.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 15:02:24 -0500 (Tue, 25 Nov 2008)
New Revision: 2880
Modified:
mgmt/trunk/cumin/python/cumin/model.py
mgmt/trunk/mint/python/mint/__init__.py
mgmt/trunk/mint/python/mint/update.py
Log:
Use a MintBroker object as a proxy to the underlying qmf broker object.
On it we keep per broker id mappings and orphan tracking.
Modified: mgmt/trunk/cumin/python/cumin/model.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/model.py 2008-11-25 20:00:55 UTC (rev 2879)
+++ mgmt/trunk/cumin/python/cumin/model.py 2008-11-25 20:02:24 UTC (rev 2880)
@@ -255,9 +255,9 @@
def get_session_by_registration(self, reg):
assert reg.broker
- assert reg.broker.managedBroker in self.model.data.managedBrokers
+ assert reg.broker.managedBroker in self.model.data.mintBrokers
- broker = self.model.data.managedBrokers[reg.broker.managedBroker][0]
+ broker = self.model.data.mintBrokers[reg.broker.managedBroker]
return broker.getAmqpSession()
class CuminActionInvocation(object):
Modified: mgmt/trunk/mint/python/mint/__init__.py
===================================================================
--- mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 20:00:55 UTC (rev 2879)
+++ mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 20:02:24 UTC (rev 2880)
@@ -294,6 +294,18 @@
brokers = SQLMultipleJoin("BrokerRegistration", joinColumn="profile_id")
properties = SQLMultipleJoin("ConfigProperty", joinColumn="profile_id")
+class MintBroker(object):
+ def __init__(self, qmfBroker):
+ self.qmfBroker = qmfBroker
+
+ self.qmfId = str(self.qmfBroker.getBrokerId())
+ self.databaseId = None
+ self.objectDatabaseIds = MintCache() # database ids by qmf object id
+ self.orphans = dict() # updates by qmf object id
+
+ def getAmqpSession(self):
+ return self.qmfBroker.getAmqpSession()
+
class MintModel(qmf.console.Console):
staticInstance = None
@@ -304,28 +316,15 @@
assert MintModel.staticInstance is None
MintModel.staticInstance = self
- self.connCloseListener = None
+ self.mintBrokers = dict() # MintBrokers by qmfId
+
self.__lock = RLock()
- self.dbStyle = MixedCaseUnderscoreStyle()
self.dbConn = None
- # map containing updateObjects that have a missing parent
- # dependency, for deferred insertion (missing_class,
- # missing_id.first, missing_id.second) -> [updateObject, ...,
- # updateObject]
- self.orphanObjectMap = dict()
-
- self.orphans = dict()
-
self.updateThread = update.ModelUpdateThread(self)
- self.mgmtSession = qmf.console.Session(self)
+ self.mgmtSession = qmf.console.Session(self, manageConnections=True)
self.outstandingMethodCalls = dict()
- self.managedBrokers = dict()
- # cache contains mapping between qmf ids and database ids
- # (idFirst, idSecond) -> dbId
- self.cache = MintCache()
-
if self.debug:
log.setLevel(logging.DEBUG)
@@ -352,20 +351,18 @@
def stop(self):
self.updateThread.stop()
- def setCloseListener(self, connCloseListener):
- self.connCloseListener = connCloseListener
-
def getSession(self):
return self.mgmtSession
- def callMethod(self, managedBroker, objId, classKey, methodName, callback, args):
+ def callMethod(self, brokerId, objId, classKey, methodName, callback, args):
self.lock()
try:
- broker, dbObjId = self.managedBrokers[managedBroker]
+ broker = self.mintBrokers[brokerId]
finally:
self.unlock()
- seq = self.mgmtSession._sendMethodRequest(broker, ClassKey(classKey), objId, methodName, args)
+ seq = self.mgmtSession._sendMethodRequest \
+ (broker.qmfBroker, ClassKey(classKey), objId, methodName, args)
if seq is not None:
self.lock()
@@ -379,8 +376,8 @@
""" Invoked when a connection is established to a broker """
self.lock()
try:
- self.managedBrokers[str(broker.getBrokerId())] = (broker, 0)
- broker.idCache = MintCache()
+ mbroker = MintBroker(broker)
+ self.mintBrokers[mbroker.qmfId] = mbroker
finally:
self.unlock()
@@ -388,11 +385,9 @@
""" Invoked when the connection to a broker is lost """
self.lock()
try:
- del self.managedBrokers[str(broker.getBrokerId())]
+ del self.mintBrokers[str(broker.getBrokerId())]
finally:
self.unlock()
- if (self.connCloseListener != None):
- self.connCloseListener(broker)
def newPackage(self, name):
""" Invoked when a QMF package is discovered. """
@@ -414,7 +409,8 @@
def objectProps(self, broker, record):
""" Invoked when an object is updated. """
- up = update.PropertyUpdate(self, broker, record)
+ mbroker = self.mintBrokers[str(broker.getBrokerId())]
+ up = update.PropertyUpdate(self, mbroker, record)
if record.getClassKey().getClassName() == "job":
up.priority = 1
@@ -424,7 +420,8 @@
def objectStats(self, broker, record):
""" Invoked when an object is updated. """
- up = update.StatisticUpdate(self, broker, record)
+ mbroker = self.mintBrokers[str(broker.getBrokerId())]
+ up = update.StatisticUpdate(self, mbroker, record)
if record.getClassKey().getClassName() == "job":
up.priority = 1
@@ -439,11 +436,16 @@
pass
def brokerInfo(self, broker):
+ # XXX why do we do this?
self.lock()
try:
- self.managedBrokers[str(broker.getBrokerId())] = (broker, 0)
+ mbroker = MintBroker(broker)
+ self.mintBrokers[mbroker.qmfId] = mbroker
finally:
self.unlock()
def methodResponse(self, broker, seq, response):
- self.updateThread.enqueue(update.MethodUpdate(broker, seq, response))
+ mbroker = self.mintBrokers[str(broker.getBrokerId())]
+ up = update.MethodUpdate(self, mbroker, seq, response)
+
+ self.updateThread.enqueue(up)
Modified: mgmt/trunk/mint/python/mint/update.py
===================================================================
--- mgmt/trunk/mint/python/mint/update.py 2008-11-25 20:00:55 UTC (rev 2879)
+++ mgmt/trunk/mint/python/mint/update.py 2008-11-25 20:02:24 UTC (rev 2880)
@@ -4,6 +4,7 @@
import types
import pickle
import psycopg2
+import mint
from Queue import Queue as ConcurrentQueue, Full, Empty
from threading import Thread
from traceback import print_exc
@@ -68,20 +69,20 @@
conn.commit()
- for broker, id in self.model.managedBrokers.values():
- broker.idCache.commit()
+ for broker in self.model.mintBrokers.values():
+ broker.objectDatabaseIds.commit()
profile.commitTime += clock() - start
else:
conn.commit()
- for broker, id in self.model.managedBrokers.values():
- broker.idCache.commit()
+ for broker in self.model.mintBrokers.values():
+ broker.objectDatabaseIds.commit()
except:
- conn.rollback()
+ conn.rollback()
- for broker, id in self.model.managedBrokers.values():
- broker.idCache.rollback()
+ for broker in self.model.mintBrokers.values():
+ broker.objectDatabaseIds.rollback()
log.exception("Update failed")
@@ -151,7 +152,7 @@
foreignKey = name + "_id"
- id = self.broker.idCache.get(oid)
+ id = self.broker.objectDatabaseIds.get(oid)
if id is None:
raise ReferenceException(oid)
@@ -182,10 +183,10 @@
log.info("Referenced object %r not found", e.sought)
try:
- orphans = self.model.orphans[oid]
+ orphans = self.broker.orphans[oid]
orphans.append(self)
except KeyError:
- self.model.orphans[oid] = list((self,))
+ self.broker.orphans[oid] = list((self,))
return
@@ -202,7 +203,7 @@
attrs["sourceScopeId"] = oid.first
attrs["sourceObjectId"] = oid.second
attrs["qmfClassKey"] = str(self.object.getClassKey())
- attrs["managedBroker"] = str(self.broker.getBrokerId())
+ attrs["managedBroker"] = self.broker.qmfId
cursor = conn.cursor()
@@ -212,7 +213,7 @@
# 2. Object is in mint's db, but id is not yet cached
# 3. Object is in mint's db, and id is cached
- id = self.broker.idCache.get(oid)
+ id = self.broker.objectDatabaseIds.get(oid)
if id is None:
# Case 1 or 2
@@ -247,7 +248,7 @@
assert cursor.rowcount == 1
- self.broker.idCache.set(oid, id)
+ self.broker.objectDatabaseIds.set(oid, id)
else:
# Case 3
@@ -259,7 +260,7 @@
assert cursor.rowcount == 1
try:
- orphans = self.model.orphans.pop(oid)
+ orphans = self.broker.orphans.pop(oid)
if orphans:
log.info("Re-enqueueing %i orphans whose creation had been deferred",
@@ -271,25 +272,26 @@
pass
def processBroker(self, cursor, id):
- brokerId = str(self.broker.getBrokerId())
+ try:
+ broker = self.model.mintBrokers[self.broker.qmfId]
+ except KeyError:
+ # XXX what does this mean?
+ return
- if brokerId in self.model.managedBrokers:
- broker, dbId = self.model.managedBrokers[brokerId]
+ if broker.databaseId is None:
+ op = SqlGetBrokerRegistration()
+ op.execute(cursor, {"url": self.broker.getFullUrl()})
- if dbId == 0:
- op = SqlGetBrokerRegistration()
- op.execute(cursor, {"url": self.broker.getFullUrl()})
+ rec = cursor.fetchone()
- rec = cursor.fetchone()
+ if rec:
+ rid = rec[0]
- if rec:
- rid = rec[0]
+ op = SqlAttachBroker()
+ op.execute(cursor, {"id": id, "registrationId": rid})
- op = SqlAttachBroker()
- op.execute(cursor, {"id": id, "registrationId": rid})
+ broker.databaseId = id
- self.model.managedBrokers[brokerId] = (broker, id)
-
class StatisticUpdate(ModelUpdate):
def process(self, conn):
try:
@@ -301,7 +303,7 @@
statsCls = getattr(mint, "%sStats" % cls.__name__)
oid = self.object.getObjectId()
- id = self.broker.idCache.get(oid)
+ id = self.broker.objectDatabaseIds.get(oid)
if id is None:
# Just drop it; we'll get more stats later
@@ -332,8 +334,8 @@
op.execute(cursor, {"statsId": statsId, "id": id})
class MethodUpdate(ModelUpdate):
- def __init__(self, broker, seq, response):
- super(MethodUpdate, self).__init__(broker, response)
+ def __init__(self, model, broker, seq, response):
+ super(MethodUpdate, self).__init__(model, broker, response)
self.seq = seq
16 years, 1 month
rhmessaging commits: r2879 - mgmt/trunk/cumin/python/cumin.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 15:00:55 -0500 (Tue, 25 Nov 2008)
New Revision: 2879
Modified:
mgmt/trunk/cumin/python/cumin/broker.py
Log:
Force a sync to pick up updates from the mint thread
Modified: mgmt/trunk/cumin/python/cumin/broker.py
===================================================================
--- mgmt/trunk/cumin/python/cumin/broker.py 2008-11-25 16:32:43 UTC (rev 2878)
+++ mgmt/trunk/cumin/python/cumin/broker.py 2008-11-25 20:00:55 UTC (rev 2879)
@@ -214,6 +214,8 @@
pass
def do_process(self, session, reg):
+ reg.sync()
+
if reg.broker:
self.__vhost.set(session, reg.getDefaultVhost())
else:
16 years, 1 month
rhmessaging commits: r2878 - in mgmt/trunk/mint: sql and 1 other directory.
by rhmessaging-commits@lists.jboss.org
Author: justi9
Date: 2008-11-25 11:32:43 -0500 (Tue, 25 Nov 2008)
New Revision: 2878
Modified:
mgmt/trunk/mint/python/mint/__init__.py
mgmt/trunk/mint/python/mint/cache.py
mgmt/trunk/mint/python/mint/schema.py
mgmt/trunk/mint/python/mint/schemaparser.py
mgmt/trunk/mint/python/mint/sql.py
mgmt/trunk/mint/python/mint/update.py
mgmt/trunk/mint/sql/schema.sql
Log:
Make the object id unique constraint include the broker, since object ids
from QMF are not themselves unique across brokers.
Make id caching a per-broker thing.
Modified: mgmt/trunk/mint/python/mint/__init__.py
===================================================================
--- mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/__init__.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -380,7 +380,7 @@
self.lock()
try:
self.managedBrokers[str(broker.getBrokerId())] = (broker, 0)
-
+ broker.idCache = MintCache()
finally:
self.unlock()
Modified: mgmt/trunk/mint/python/mint/cache.py
===================================================================
--- mgmt/trunk/mint/python/mint/cache.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/cache.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -1,12 +1,7 @@
from threading import RLock
class MintCache(object):
- staticInstance = None
-
def __init__(self):
- assert MintCache.staticInstance is None
- MintCache.staticInstance = self
-
self.__cache = dict()
self.__pending = dict()
self.__dirty = False
Modified: mgmt/trunk/mint/python/mint/schema.py
===================================================================
--- mgmt/trunk/mint/python/mint/schema.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/schema.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -14,13 +14,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SlotStats', cascade='null', default=None)
statsPrev = ForeignKey('SlotStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -135,13 +135,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('JobStats', cascade='null', default=None)
statsPrev = ForeignKey('JobStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -247,13 +247,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SchedulerStats', cascade='null', default=None)
statsPrev = ForeignKey('SchedulerStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -300,13 +300,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SubmitterStats', cascade='null', default=None)
statsPrev = ForeignKey('SubmitterStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -336,13 +336,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('NegotiatorStats', cascade='null', default=None)
statsPrev = ForeignKey('NegotiatorStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -404,13 +404,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('CollectorStats', cascade='null', default=None)
statsPrev = ForeignKey('CollectorStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -438,13 +438,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('MasterStats', cascade='null', default=None)
statsPrev = ForeignKey('MasterStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -500,13 +500,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('AclStats', cascade='null', default=None)
statsPrev = ForeignKey('AclStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -541,13 +541,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('ClusterStats', cascade='null', default=None)
statsPrev = ForeignKey('ClusterStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -588,13 +588,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('StoreStats', cascade='null', default=None)
statsPrev = ForeignKey('StoreStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -636,13 +636,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('JournalStats', cascade='null', default=None)
statsPrev = ForeignKey('JournalStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -712,13 +712,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SystemStats', cascade='null', default=None)
statsPrev = ForeignKey('SystemStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -746,13 +746,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('BrokerStats', cascade='null', default=None)
statsPrev = ForeignKey('BrokerStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -828,13 +828,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('AgentStats', cascade='null', default=None)
statsPrev = ForeignKey('AgentStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -862,13 +862,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('VhostStats', cascade='null', default=None)
statsPrev = ForeignKey('VhostStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -893,13 +893,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('QueueStats', cascade='null', default=None)
statsPrev = ForeignKey('QueueStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -963,13 +963,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('ExchangeStats', cascade='null', default=None)
statsPrev = ForeignKey('ExchangeStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1008,13 +1008,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('BindingStats', cascade='null', default=None)
statsPrev = ForeignKey('BindingStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1042,13 +1042,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('ClientConnectionStats', cascade='null', default=None)
statsPrev = ForeignKey('ClientConnectionStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1087,13 +1087,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('LinkStats', cascade='null', default=None)
statsPrev = ForeignKey('LinkStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1153,13 +1153,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('BridgeStats', cascade='null', default=None)
statsPrev = ForeignKey('BridgeStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1198,13 +1198,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SessionStats', cascade='null', default=None)
statsPrev = ForeignKey('SessionStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
@@ -1262,13 +1262,13 @@
lazyUpdate = True
recTime = TimestampCol(default=None)
+ managedBroker = StringCol(length=1000, default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
- source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
+ source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = StringCol(length=1000, default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
- managedBroker = StringCol(length=1000, default=None)
statsCurr = ForeignKey('SysimageStats', cascade='null', default=None)
statsPrev = ForeignKey('SysimageStats', cascade='null', default=None)
classInfos = dict() # brokerId => classInfo
Modified: mgmt/trunk/mint/python/mint/schemaparser.py
===================================================================
--- mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -92,7 +92,7 @@
self.pythonOutput += " lazyUpdate = %s\n\n" % (lazyUpdate)
def generateSourceIdsIndex(self, className):
- self.pythonOutput += " source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)\n"
+ self.pythonOutput += " source_ids_unique = DatabaseIndex(managedBroker, sourceScopeId, sourceObjectId, unique=True)\n"
def generateClassAttribs(self, schemaName, elements):
if (schemaName == "JournalStats"):
@@ -143,13 +143,13 @@
self.generateForeignKeyAttrib(colPythonName[0].lower() + colPythonName[1:], keyPythonName)
self.generateMultipleJoin(origPythonName, pythonName, "stats")
else:
+ self.generateAttrib("managedBroker", "StringCol", "length=1000")
self.generateAttrib("sourceScopeId", "BigIntCol")
self.generateAttrib("sourceObjectId", "BigIntCol")
self.generateSourceIdsIndex(pythonName)
self.generateAttrib("qmfClassKey", "StringCol", "length=1000")
self.generateTimestampAttrib("creation")
self.generateTimestampAttrib("deletion")
- self.generateAttrib("managedBroker", "StringCol", "length=1000")
self.generateForeignKeyAttrib("statsCurr", statsPythonName)
self.generateForeignKeyAttrib("statsPrev", statsPythonName)
self.finalPythonOutput += "classToSchemaNameMap['%s'] = '%s'\n" % (pythonName, schemaName)
Modified: mgmt/trunk/mint/python/mint/sql.py
===================================================================
--- mgmt/trunk/mint/python/mint/sql.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/sql.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -72,6 +72,7 @@
select id from %s
where source_scope_id = %%(sourceScopeId)s
and source_object_id = %%(sourceObjectId)s
+ and managed_broker = %%(managedBroker)s
""" % table
class SqlSetStatsRefs(SqlOperation):
Modified: mgmt/trunk/mint/python/mint/update.py
===================================================================
--- mgmt/trunk/mint/python/mint/update.py 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/python/mint/update.py 2008-11-25 16:32:43 UTC (rev 2878)
@@ -65,15 +65,24 @@
if profile:
start = clock()
+
conn.commit()
- self.model.cache.commit()
+
+ for broker, id in self.model.managedBrokers.values():
+ broker.idCache.commit()
+
profile.commitTime += clock() - start
else:
conn.commit()
- self.model.cache.commit()
+
+ for broker, id in self.model.managedBrokers.values():
+ broker.idCache.commit()
except:
conn.rollback()
- self.model.cache.rollback()
+
+ for broker, id in self.model.managedBrokers.values():
+ broker.idCache.rollback()
+
log.exception("Update failed")
def stop(self):
@@ -142,7 +151,7 @@
foreignKey = name + "_id"
- id = self.model.cache.get(oid)
+ id = self.broker.idCache.get(oid)
if id is None:
raise ReferenceException(oid)
@@ -203,7 +212,7 @@
# 2. Object is in mint's db, but id is not yet cached
# 3. Object is in mint's db, and id is cached
- id = self.model.cache.get(oid)
+ id = self.broker.idCache.get(oid)
if id is None:
# Case 1 or 2
@@ -238,7 +247,7 @@
assert cursor.rowcount == 1
- self.model.cache.set(oid, id)
+ self.broker.idCache.set(oid, id)
else:
# Case 3
@@ -265,16 +274,12 @@
brokerId = str(self.broker.getBrokerId())
if brokerId in self.model.managedBrokers:
- broker, dbObjId = self.model.managedBrokers[brokerId]
+ broker, dbId = self.model.managedBrokers[brokerId]
- #print "broker, dbObjId", broker, dbObjId
-
- if dbObjId == 0:
+ if dbId == 0:
op = SqlGetBrokerRegistration()
op.execute(cursor, {"url": self.broker.getFullUrl()})
- #print op.text % {"url": self.broker.getFullUrl()}
-
rec = cursor.fetchone()
if rec:
@@ -283,12 +288,8 @@
op = SqlAttachBroker()
op.execute(cursor, {"id": id, "registrationId": rid})
- #print op.text % {"id": id, "registrationId": rid}
-
self.model.managedBrokers[brokerId] = (broker, id)
-
-
class StatisticUpdate(ModelUpdate):
def process(self, conn):
try:
@@ -300,7 +301,7 @@
statsCls = getattr(mint, "%sStats" % cls.__name__)
oid = self.object.getObjectId()
- id = self.model.cache.get(oid)
+ id = self.broker.idCache.get(oid)
if id is None:
# Just drop it; we'll get more stats later
Modified: mgmt/trunk/mint/sql/schema.sql
===================================================================
--- mgmt/trunk/mint/sql/schema.sql 2008-11-25 15:07:55 UTC (rev 2877)
+++ mgmt/trunk/mint/sql/schema.sql 2008-11-25 16:32:43 UTC (rev 2878)
@@ -65,12 +65,12 @@
CREATE TABLE acl (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
broker_id INT,
@@ -79,7 +79,7 @@
transfer_acl BOOL,
last_acl_load TIMESTAMP
);
-CREATE UNIQUE INDEX acl_source_ids_unique ON acl (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX acl_source_ids_unique ON acl (managed_broker, source_scope_id, source_object_id);
CREATE TABLE acl_stats (
id SERIAL PRIMARY KEY,
@@ -91,12 +91,12 @@
CREATE TABLE agent (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
client_connection_id INT,
@@ -106,7 +106,7 @@
broker_bank BIGINT,
agent_bank BIGINT
);
-CREATE UNIQUE INDEX agent_source_ids_unique ON agent (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX agent_source_ids_unique ON agent (managed_broker, source_scope_id, source_object_id);
CREATE TABLE agent_stats (
id SERIAL PRIMARY KEY,
@@ -117,12 +117,12 @@
CREATE TABLE binding (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
exchange_id INT,
@@ -131,7 +131,7 @@
arguments BYTEA,
origin VARCHAR(1000)
);
-CREATE UNIQUE INDEX binding_source_ids_unique ON binding (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX binding_source_ids_unique ON binding (managed_broker, source_scope_id, source_object_id);
CREATE TABLE binding_stats (
id SERIAL PRIMARY KEY,
@@ -143,12 +143,12 @@
CREATE TABLE bridge (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
link_id INT,
@@ -163,7 +163,7 @@
excludes VARCHAR(1000),
dynamic BOOL
);
-CREATE UNIQUE INDEX bridge_source_ids_unique ON bridge (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX bridge_source_ids_unique ON bridge (managed_broker, source_scope_id, source_object_id);
CREATE TABLE bridge_stats (
id SERIAL PRIMARY KEY,
@@ -174,12 +174,12 @@
CREATE TABLE broker (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
system_id INT,
@@ -193,7 +193,7 @@
data_dir VARCHAR(1000),
registration_id INT
);
-CREATE UNIQUE INDEX broker_source_ids_unique ON broker (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX broker_source_ids_unique ON broker (managed_broker, source_scope_id, source_object_id);
CREATE TABLE broker_stats (
id SERIAL PRIMARY KEY,
@@ -204,12 +204,12 @@
CREATE TABLE client_connection (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
vhost_id INT,
@@ -219,7 +219,7 @@
federation_link BOOL,
auth_identity VARCHAR(1000)
);
-CREATE UNIQUE INDEX client_connection_source_ids_unique ON client_connection (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX client_connection_source_ids_unique ON client_connection (managed_broker, source_scope_id, source_object_id);
CREATE TABLE client_connection_stats (
id SERIAL PRIMARY KEY,
@@ -235,12 +235,12 @@
CREATE TABLE cluster (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
broker_id INT,
@@ -251,7 +251,7 @@
status VARCHAR(1000),
members VARCHAR(4000)
);
-CREATE UNIQUE INDEX cluster_source_ids_unique ON cluster (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX cluster_source_ids_unique ON cluster (managed_broker, source_scope_id, source_object_id);
CREATE TABLE cluster_stats (
id SERIAL PRIMARY KEY,
@@ -262,12 +262,12 @@
CREATE TABLE collector (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
pool VARCHAR(1000),
@@ -277,7 +277,7 @@
name VARCHAR(1000),
public_network_ip_addr VARCHAR(1000)
);
-CREATE UNIQUE INDEX collector_source_ids_unique ON collector (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX collector_source_ids_unique ON collector (managed_broker, source_scope_id, source_object_id);
CREATE TABLE collector_stats (
id SERIAL PRIMARY KEY,
@@ -288,12 +288,12 @@
CREATE TABLE exchange (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
vhost_id INT,
@@ -302,7 +302,7 @@
durable BOOL,
arguments BYTEA
);
-CREATE UNIQUE INDEX exchange_source_ids_unique ON exchange (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX exchange_source_ids_unique ON exchange (managed_broker, source_scope_id, source_object_id);
CREATE TABLE exchange_stats (
id SERIAL PRIMARY KEY,
@@ -325,12 +325,12 @@
CREATE TABLE job (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
scheduler_id INT,
@@ -362,7 +362,7 @@
dag_man_job_id BIGINT,
ad BYTEA
);
-CREATE UNIQUE INDEX job_source_ids_unique ON job (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX job_source_ids_unique ON job (managed_broker, source_scope_id, source_object_id);
CREATE TABLE job_stats (
id SERIAL PRIMARY KEY,
@@ -373,12 +373,12 @@
CREATE TABLE journal (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
queue_id INT,
@@ -393,7 +393,7 @@
data_file_size BIGINT,
current_file_count BIGINT
);
-CREATE UNIQUE INDEX journal_source_ids_unique ON journal (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX journal_source_ids_unique ON journal (managed_broker, source_scope_id, source_object_id);
CREATE TABLE journal_stats (
id SERIAL PRIMARY KEY,
@@ -432,12 +432,12 @@
CREATE TABLE link (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
vhost_id INT,
@@ -446,7 +446,7 @@
transport VARCHAR(1000),
durable BOOL
);
-CREATE UNIQUE INDEX link_source_ids_unique ON link (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX link_source_ids_unique ON link (managed_broker, source_scope_id, source_object_id);
CREATE TABLE link_stats (
id SERIAL PRIMARY KEY,
@@ -459,12 +459,12 @@
CREATE TABLE master (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
pool VARCHAR(1000),
@@ -478,7 +478,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX master_source_ids_unique ON master (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX master_source_ids_unique ON master (managed_broker, source_scope_id, source_object_id);
CREATE TABLE master_stats (
id SERIAL PRIMARY KEY,
@@ -495,12 +495,12 @@
CREATE TABLE negotiator (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
pool VARCHAR(1000),
@@ -512,7 +512,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX negotiator_source_ids_unique ON negotiator (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX negotiator_source_ids_unique ON negotiator (managed_broker, source_scope_id, source_object_id);
CREATE TABLE negotiator_stats (
id SERIAL PRIMARY KEY,
@@ -534,12 +534,12 @@
CREATE TABLE queue (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
vhost_id INT,
@@ -549,7 +549,7 @@
exclusive BOOL,
arguments BYTEA
);
-CREATE UNIQUE INDEX queue_source_ids_unique ON queue (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX queue_source_ids_unique ON queue (managed_broker, source_scope_id, source_object_id);
CREATE TABLE queue_stats (
id SERIAL PRIMARY KEY,
@@ -587,12 +587,12 @@
CREATE TABLE scheduler (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
pool VARCHAR(1000),
@@ -607,7 +607,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX scheduler_source_ids_unique ON scheduler (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX scheduler_source_ids_unique ON scheduler (managed_broker, source_scope_id, source_object_id);
CREATE TABLE scheduler_stats (
id SERIAL PRIMARY KEY,
@@ -630,12 +630,12 @@
CREATE TABLE session (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
vhost_id INT,
@@ -646,7 +646,7 @@
attached BOOL,
expire_time TIMESTAMP
);
-CREATE UNIQUE INDEX session_source_ids_unique ON session (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX session_source_ids_unique ON session (managed_broker, source_scope_id, source_object_id);
CREATE TABLE session_stats (
id SERIAL PRIMARY KEY,
@@ -662,12 +662,12 @@
CREATE TABLE slot (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
pool VARCHAR(1000),
@@ -724,7 +724,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
-CREATE UNIQUE INDEX slot_source_ids_unique ON slot (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX slot_source_ids_unique ON slot (managed_broker, source_scope_id, source_object_id);
CREATE TABLE slot_stats (
id SERIAL PRIMARY KEY,
@@ -773,12 +773,12 @@
CREATE TABLE store (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
broker_id INT,
@@ -793,7 +793,7 @@
tpl_data_file_size BIGINT,
tpl_current_file_count BIGINT
);
-CREATE UNIQUE INDEX store_source_ids_unique ON store (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX store_source_ids_unique ON store (managed_broker, source_scope_id, source_object_id);
CREATE TABLE store_stats (
id SERIAL PRIMARY KEY,
@@ -813,12 +813,12 @@
CREATE TABLE submitter (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
scheduler_id INT,
@@ -827,7 +827,7 @@
name VARCHAR(1000),
schedd_name VARCHAR(1000)
);
-CREATE UNIQUE INDEX submitter_source_ids_unique ON submitter (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX submitter_source_ids_unique ON submitter (managed_broker, source_scope_id, source_object_id);
CREATE TABLE submitter_stats (
id SERIAL PRIMARY KEY,
@@ -841,12 +841,12 @@
CREATE TABLE sysimage (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
uuid BYTEA,
@@ -858,7 +858,7 @@
mem_total BIGINT,
swap_total BIGINT
);
-CREATE UNIQUE INDEX sysimage_source_ids_unique ON sysimage (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX sysimage_source_ids_unique ON sysimage (managed_broker, source_scope_id, source_object_id);
CREATE TABLE sysimage_stats (
id SERIAL PRIMARY KEY,
@@ -876,12 +876,12 @@
CREATE TABLE system (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
system_id BYTEA,
@@ -891,7 +891,7 @@
version VARCHAR(1000),
machine VARCHAR(1000)
);
-CREATE UNIQUE INDEX system_source_ids_unique ON system (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX system_source_ids_unique ON system (managed_broker, source_scope_id, source_object_id);
CREATE TABLE system_stats (
id SERIAL PRIMARY KEY,
@@ -902,19 +902,19 @@
CREATE TABLE vhost (
id SERIAL PRIMARY KEY,
rec_time TIMESTAMP,
+ managed_broker VARCHAR(1000),
source_scope_id BIGINT,
source_object_id BIGINT,
qmf_class_key VARCHAR(1000),
creation_time TIMESTAMP,
deletion_time TIMESTAMP,
- managed_broker VARCHAR(1000),
stats_curr_id INT,
stats_prev_id INT,
broker_id INT,
name VARCHAR(1000),
federation_tag VARCHAR(1000)
);
-CREATE UNIQUE INDEX vhost_source_ids_unique ON vhost (source_scope_id, source_object_id);
+CREATE UNIQUE INDEX vhost_source_ids_unique ON vhost (managed_broker, source_scope_id, source_object_id);
CREATE TABLE vhost_stats (
id SERIAL PRIMARY KEY,
16 years, 1 month