[rhmessaging-commits] rhmessaging commits: r2833 - in mgmt/trunk/mint: sql and 1 other directory.
rhmessaging-commits at lists.jboss.org
Tue Nov 18 15:10:24 EST 2008
Author: nunofsantos
Date: 2008-11-18 15:10:24 -0500 (Tue, 18 Nov 2008)
New Revision: 2833
Modified:
mgmt/trunk/mint/python/mint/__init__.py
mgmt/trunk/mint/python/mint/schema.py
mgmt/trunk/mint/python/mint/schemaparser.py
mgmt/trunk/mint/python/mint/update.py
mgmt/trunk/mint/sql/schema.sql
Log:
performance improvements, including caching of qmfId-to-dbId mapping, new db indexes, and optimization of processAttributes
Modified: mgmt/trunk/mint/python/mint/__init__.py
===================================================================
--- mgmt/trunk/mint/python/mint/__init__.py 2008-11-18 19:06:56 UTC (rev 2832)
+++ mgmt/trunk/mint/python/mint/__init__.py 2008-11-18 20:10:24 UTC (rev 2833)
@@ -183,7 +183,7 @@
subject = ForeignKey("Subject", notNull=True, cascade=True)
role = ForeignKey("Role", notNull=True, cascade=True)
- unique = index.DatabaseIndex(subject, role, unique=True)
+ unique = DatabaseIndex(subject, role, unique=True)
class ObjectNotFound(Exception):
pass
@@ -215,7 +215,7 @@
cluster = ForeignKey("BrokerCluster", cascade="null", default=None)
profile = ForeignKey("BrokerProfile", cascade="null", default=None)
- url_unique = index.DatabaseIndex(url, unique=True)
+ url_unique = DatabaseIndex(url, unique=True)
def connect(self, model):
log.info("Connecting to broker '%s' at %s" % (self.name, self.url))
@@ -268,7 +268,7 @@
brokerRegistration = ForeignKey("BrokerRegistration", notNull=True,
cascade=True)
brokerGroup = ForeignKey("BrokerGroup", notNull=True, cascade=True)
- unique = index.DatabaseIndex(brokerRegistration, brokerGroup, unique=True)
+ unique = DatabaseIndex(brokerRegistration, brokerGroup, unique=True)
class BrokerCluster(SQLObject):
class sqlmeta:
@@ -310,6 +310,8 @@
self.outstandingMethodCalls = dict()
self.managedBrokers = dict()
+ self.qmfIdToDbIdMap = dict()
+
if self.debug:
log.setLevel(logging.DEBUG)
@@ -341,13 +343,20 @@
def getObjectId(self, cls, id):
if isinstance(id, qpid.qmfconsole.ObjectId):
+ first = id.first
+ second = id.second
+ if (first, second) in self.qmfIdToDbIdMap:
+ return self.qmfIdToDbIdMap[(first, second)]
+
try:
conn = self.dbConn.getConnection()
cursor = conn.cursor()
cursor.execute("select id from %s where source_scope_id = %s and source_object_id = %s" \
- % (self.dbStyle.pythonClassToDBTable(cls.__name__), id.first, id.second));
- for rec in cursor:
- return rec[0]
+ % (self.dbStyle.pythonClassToDBTable(cls.__name__), first, second));
+ rec = cursor.fetchone()
+ dbId = rec[0]
+ self.qmfIdToDbIdMap[(first, second)] = dbId
+ return dbId
except Exception:
raise ObjectNotFound()
finally:
Modified: mgmt/trunk/mint/python/mint/schema.py
===================================================================
--- mgmt/trunk/mint/python/mint/schema.py 2008-11-18 19:06:56 UTC (rev 2832)
+++ mgmt/trunk/mint/python/mint/schema.py 2008-11-18 20:10:24 UTC (rev 2833)
@@ -16,6 +16,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -136,6 +137,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -247,6 +249,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -299,6 +302,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -334,6 +338,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -401,6 +406,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -434,6 +440,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -495,6 +502,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -535,6 +543,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -581,6 +590,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -628,6 +638,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -703,6 +714,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -736,6 +748,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -817,6 +830,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -850,6 +864,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -880,6 +895,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -949,6 +965,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -993,6 +1010,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1026,6 +1044,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1070,6 +1089,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1135,6 +1155,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1179,6 +1200,7 @@
recTime = TimestampCol(default=None)
sourceScopeId = BigIntCol(default=None)
sourceObjectId = BigIntCol(default=None)
+ source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)
qmfClassKey = BLOBCol(default=None)
creationTime = TimestampCol(default=None)
deletionTime = TimestampCol(default=None)
@@ -1240,7 +1262,8 @@
schemaReservedWordsMap = {"in": "inRsv", "In": "InRsv",
"connection": "clientConnection", "Connection": "ClientConnection",
"connectionRef": "clientConnectionRef",
- "user": "gridUser", "User": "GridUser"}
+ "user": "gridUser", "User": "GridUser",
+ "registeredTo": "broker"}
classToSchemaNameMap['Slot'] = 'Slot'
schemaNameToClassMap['Slot'] = Slot
Modified: mgmt/trunk/mint/python/mint/schemaparser.py
===================================================================
--- mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-18 19:06:56 UTC (rev 2832)
+++ mgmt/trunk/mint/python/mint/schemaparser.py 2008-11-18 20:10:24 UTC (rev 2833)
@@ -34,7 +34,8 @@
self.reservedWords = {"in": "inRsv", "In": "InRsv",
"connection": "clientConnection", "Connection": "ClientConnection",
"connectionRef": "clientConnectionRef",
- "user": "gridUser", "User": "GridUser"}
+ "user": "gridUser", "User": "GridUser",
+ "registeredTo": "broker"}
def renameReservedWord(self, name):
if (name in self.reservedWords.keys()):
@@ -88,6 +89,9 @@
self.pythonOutput += " class sqlmeta:\n"
self.pythonOutput += " lazyUpdate = %s\n\n" % (lazyUpdate)
+ def generateSourceIdsIndex(self, className):
+ self.pythonOutput += " source_ids_unique = DatabaseIndex(sourceScopeId, sourceObjectId, unique=True)\n"
+
def generateClassAttribs(self, schemaName, elements):
if (schemaName == "JournalStats"):
print schemaName
@@ -139,6 +143,7 @@
else:
self.generateAttrib("sourceScopeId", "BigIntCol")
self.generateAttrib("sourceObjectId", "BigIntCol")
+ self.generateSourceIdsIndex(pythonName)
self.generateAttrib("qmfClassKey", "BLOBCol")
self.generateTimestampAttrib("creation")
self.generateTimestampAttrib("deletion")
@@ -194,7 +199,8 @@
self.finalPythonOutput += 'schemaReservedWordsMap = {"in": "inRsv", "In": "InRsv", \n'
self.finalPythonOutput += ' "connection": "clientConnection", "Connection": "ClientConnection", \n'
self.finalPythonOutput += ' "connectionRef": "clientConnectionRef", \n'
- self.finalPythonOutput += ' "user": "gridUser", "User": "GridUser"} \n\n'
+ self.finalPythonOutput += ' "user": "gridUser", "User": "GridUser", \n'
+ self.finalPythonOutput += ' "registeredTo": "broker"} \n\n'
outputFile = open(self.pythonFilePath, "w")
for xmlFile in self.xmlFilePaths:
schema = mllib.xml_parse(xmlFile)
Modified: mgmt/trunk/mint/python/mint/update.py
===================================================================
--- mgmt/trunk/mint/python/mint/update.py 2008-11-18 19:06:56 UTC (rev 2832)
+++ mgmt/trunk/mint/python/mint/update.py 2008-11-18 20:10:24 UTC (rev 2833)
@@ -74,73 +74,62 @@
pass
def processAttributes(self, attrs, cls, model):
- # translate keys into their string representation
- for key in attrs.keys():
- attrs[key.__repr__()] = attrs.pop(key)
-
+ results = {}
orphan = False
- for name in attrs.keys():
- rename = ""
- orig_name = name
- if name in mint.schema.schemaReservedWordsMap:
- rename = mint.schema.schemaReservedWordsMap.get(name)
- attrs[rename] = attrs.pop(name)
- name = rename
- if len(name) > 3 and name.endswith("Ref"):
+ for (key, value) in attrs:
+ name = key.__repr__()
+ if name in mint.schema.schemaReservedWordsMap:
+ name = mint.schema.schemaReservedWordsMap.get(name)
+
+ if key.type == 10:
# Navigate to referenced objects
- clsname = name[0].upper() + name[1:-3]
- id = attrs.pop(name)
-
- othercls = getattr(mint, clsname, None)
-
- if othercls:
- attrname = name[0:-3]
-
- attrname += "_id"
+ if name.endswith("Ref"):
+ name = name[:-3]
+ className = name[0].upper() + name[1:]
+ otherClass = getattr(mint, className, None)
+ if otherClass:
+ foreignKey = name + "_id"
try:
- attrs[attrname] = model.getObjectId(othercls, id)
+ results[foreignKey] = model.getObjectId(otherClass, value)
except KeyError:
- log.info("Referenced object %s '%s' not found by key '%s'" % (clsname, id, attrname))
+ log.info("Referenced object %s '%s' not found by key '%s'" % (className, value, foreignKey))
except mint.ObjectNotFound:
if not orphan:
- log.info("Referenced object %s '%s' not found, deferring creation of orphan object" % (clsname, id))
+ log.info("Referenced object %s '%s' not found, deferring creation of orphan object" % (className, value))
# store object in orphan map, will be picked up later when parent info is received
- if (clsname, id.first, id.second) not in model.orphanObjectMap:
- model.orphanObjectMap[(clsname, id.first, id.second)] = set()
- model.orphanObjectMap[(clsname, id.first, id.second)].add(self)
+ if (className, value.first, value.second) not in model.orphanObjectMap:
+ model.orphanObjectMap[(className, value.first, value.second)] = set()
+ model.orphanObjectMap[(className, value.first, value.second)].add(self)
orphan = True
else:
- log.error("Class '%s' not found" % clsname)
- elif not hasattr(eval("mint.schema."+cls), name):
- # Remove attrs that we don't have in our schema
- log.debug("Class '%s' has no field '%s'" % ("mint.schema."+cls, name))
- del attrs[name]
- #XXX FIX -- TODO when converting to new API, will lookup attribute type in schema representation
- elif name in ("DaemonStartTime", "EnteredCurrentActivity", "EnteredCurrentState", "JobStart",
- "LastBenchmark", "LastFetchWorkCompleted", "LastFetchWorkSpawned", "LastPeriodicCheckpoint",
- "MyCurrentTime", "QDate", "JobQueueBirthdate", "MonitorSelfTime") \
- and (type(attrs[name]) is types.LongType or type(attrs[name]) is types.IntType or attrs[name] == 0):
- attrs[name] = time_unwarp(datetime.fromtimestamp(attrs[name]/1000000000))
- elif name.endswith("Time") and type(attrs[name]) is types.IntType and attrs[name] == 0:
- attrs[name] = time_unwarp(datetime.fromtimestamp(attrs[name]))
- #XXX FIX -- TODO when converting to new API, will lookup attribute type in schema representation
- elif isinstance(attrs[name], UUID):
+ log.error("Class '%s' not found" % className)
+ elif key.type == 8:
+ # convert ABSTIME types
+ if value:
+ results[name] = time_unwarp(datetime.fromtimestamp(value/1000000000))
+ else:
+ results[name] = None
+ elif key.type == 14:
# convert UUIDs into their string representation, to be handled by sqlobject
- attrs[name] = str(attrs[name])
+ results[name] = str(value)
+ elif not hasattr(getattr(mint, cls), name):
+ # Remove attrs that we don't have in our schema
+ log.debug("Class '%s' has no field '%s'" % ("mint.schema." + cls, name))
+ else:
+ results[name] = value
if orphan:
return None
else:
- return attrs
+ return results
-
class PropertyUpdate(ModelUpdate):
def __init__(self, broker, obj):
ModelUpdate.__init__(self, broker, obj)
def process(self, model):
try:
- attrs = dict(self.qmfObj.getProperties())
+ properties = self.qmfObj.getProperties()
timestamps = self.qmfObj.getTimestamps()
id = self.qmfObj.getObjectId()
pkg, cls, hash = self.qmfObj.getClassKey()
@@ -151,7 +140,8 @@
cls = cls[0].upper()+cls[1:]
sqlCls = model.dbStyle.pythonClassToDBTable(cls)
- if self.processAttributes(attrs, cls, model) == None:
+ attrs = self.processAttributes(properties, cls, model)
+ if attrs == None:
# object is orphan, a parent dependency was not found;
# insertion in db is deferred until parent info is received
return
@@ -172,9 +162,11 @@
try:
cursor = conn.cursor()
sql = model.generateSQLUpdate(sqlCls, attrs, id)
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
if cursor.rowcount == 0:
sql = model.generateSQLInsert(sqlCls, attrs)
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
log.debug("%s(%s) created", cls, id)
conn.commit()
@@ -201,6 +193,7 @@
conn = model.dbConn.getConnection()
cursor = conn.cursor()
sql = model.generateSQLSelectWhere(sqlCls, {"managed_broker": str(self.broker.getBrokerId())})
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
rec = cursor.fetchone()
dbObjId = rec[0]
@@ -208,6 +201,7 @@
cursor = conn.cursor()
sql = model.generateSQLUpdateWhere("broker_registration", {"broker_id": dbObjId}, \
{"url": self.broker.getFullUrl()})
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
conn.commit()
model.managedBrokers[str(self.broker.getBrokerId())] = (broker, dbObjId)
@@ -226,7 +220,7 @@
def process(self, model):
try:
- attrs = dict(self.qmfObj.getStatistics())
+ statistics = self.qmfObj.getStatistics()
timestamps = self.qmfObj.getTimestamps()
id = self.qmfObj.getObjectId()
pkg, cls, hash = self.qmfObj.getClassKey()
@@ -240,27 +234,31 @@
statsCls = self.getStatsClass(cls)
sqlStatsCls = model.dbStyle.pythonClassToDBTable(statsCls)
- if self.processAttributes(attrs, statsCls, model) == None:
+ attrs = self.processAttributes(statistics, statsCls, model)
+ if attrs == None:
# object is orphan, a parent dependency was not found;
# insertion in db is deferred until parent info is received
return
attrs["recTime"] = time_unwarp(datetime.fromtimestamp(timestamps[0]/1000000000))
- conn = model.dbConn.getConnection()
try:
+ conn = model.dbConn.getConnection()
cursor = conn.cursor()
subSql = model.generateSQLSelect(sqlCls, id)
sql = model.generateSQLInsert(sqlStatsCls, attrs, {sqlCls + "_id": subSql})
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
sql = "select currval('%s_id_seq')" % (sqlStatsCls)
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
rec = cursor.fetchone()
dbStatsId = rec[0]
log.debug("%s(%s) created", statsCls, id)
sql = model.generateSQLUpdate(sqlCls, {"stats_curr_id": dbStatsId}, id, updateStats=True)
+ #log.debug("SQL: %s", sql)
cursor.execute(sql)
conn.commit()
except Exception, e:
Modified: mgmt/trunk/mint/sql/schema.sql
===================================================================
--- mgmt/trunk/mint/sql/schema.sql 2008-11-18 19:06:56 UTC (rev 2832)
+++ mgmt/trunk/mint/sql/schema.sql 2008-11-18 20:10:24 UTC (rev 2833)
@@ -79,6 +79,7 @@
transfer_acl BOOL,
last_acl_load TIMESTAMP
);
+CREATE UNIQUE INDEX acl_source_ids_unique ON acl (source_scope_id, source_object_id);
CREATE TABLE acl_stats (
id SERIAL PRIMARY KEY,
@@ -105,6 +106,7 @@
broker_bank INT,
agent_bank INT
);
+CREATE UNIQUE INDEX agent_source_ids_unique ON agent (source_scope_id, source_object_id);
CREATE TABLE agent_stats (
id SERIAL PRIMARY KEY,
@@ -129,6 +131,7 @@
arguments BYTEA,
origin VARCHAR(1000)
);
+CREATE UNIQUE INDEX binding_source_ids_unique ON binding (source_scope_id, source_object_id);
CREATE TABLE binding_stats (
id SERIAL PRIMARY KEY,
@@ -160,6 +163,7 @@
excludes VARCHAR(1000),
dynamic BOOL
);
+CREATE UNIQUE INDEX bridge_source_ids_unique ON bridge (source_scope_id, source_object_id);
CREATE TABLE bridge_stats (
id SERIAL PRIMARY KEY,
@@ -189,6 +193,7 @@
data_dir VARCHAR(1000),
registration_id INT
);
+CREATE UNIQUE INDEX broker_source_ids_unique ON broker (source_scope_id, source_object_id);
CREATE TABLE broker_stats (
id SERIAL PRIMARY KEY,
@@ -214,6 +219,7 @@
federation_link BOOL,
auth_identity VARCHAR(1000)
);
+CREATE UNIQUE INDEX client_connection_source_ids_unique ON client_connection (source_scope_id, source_object_id);
CREATE TABLE client_connection_stats (
id SERIAL PRIMARY KEY,
@@ -245,6 +251,7 @@
status VARCHAR(1000),
members VARCHAR(4000)
);
+CREATE UNIQUE INDEX cluster_source_ids_unique ON cluster (source_scope_id, source_object_id);
CREATE TABLE cluster_stats (
id SERIAL PRIMARY KEY,
@@ -270,6 +277,7 @@
name VARCHAR(1000),
public_network_ip_addr VARCHAR(1000)
);
+CREATE UNIQUE INDEX collector_source_ids_unique ON collector (source_scope_id, source_object_id);
CREATE TABLE collector_stats (
id SERIAL PRIMARY KEY,
@@ -294,6 +302,7 @@
durable BOOL,
arguments BYTEA
);
+CREATE UNIQUE INDEX exchange_source_ids_unique ON exchange (source_scope_id, source_object_id);
CREATE TABLE exchange_stats (
id SERIAL PRIMARY KEY,
@@ -353,6 +362,7 @@
dag_man_job_id INT,
ad BYTEA
);
+CREATE UNIQUE INDEX job_source_ids_unique ON job (source_scope_id, source_object_id);
CREATE TABLE job_stats (
id SERIAL PRIMARY KEY,
@@ -383,6 +393,7 @@
data_file_size INT,
current_file_count INT
);
+CREATE UNIQUE INDEX journal_source_ids_unique ON journal (source_scope_id, source_object_id);
CREATE TABLE journal_stats (
id SERIAL PRIMARY KEY,
@@ -435,6 +446,7 @@
transport VARCHAR(1000),
durable BOOL
);
+CREATE UNIQUE INDEX link_source_ids_unique ON link (source_scope_id, source_object_id);
CREATE TABLE link_stats (
id SERIAL PRIMARY KEY,
@@ -466,6 +478,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
+CREATE UNIQUE INDEX master_source_ids_unique ON master (source_scope_id, source_object_id);
CREATE TABLE master_stats (
id SERIAL PRIMARY KEY,
@@ -499,6 +512,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
+CREATE UNIQUE INDEX negotiator_source_ids_unique ON negotiator (source_scope_id, source_object_id);
CREATE TABLE negotiator_stats (
id SERIAL PRIMARY KEY,
@@ -535,6 +549,7 @@
exclusive BOOL,
arguments BYTEA
);
+CREATE UNIQUE INDEX queue_source_ids_unique ON queue (source_scope_id, source_object_id);
CREATE TABLE queue_stats (
id SERIAL PRIMARY KEY,
@@ -592,6 +607,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
+CREATE UNIQUE INDEX scheduler_source_ids_unique ON scheduler (source_scope_id, source_object_id);
CREATE TABLE scheduler_stats (
id SERIAL PRIMARY KEY,
@@ -630,6 +646,7 @@
attached BOOL,
expire_time TIMESTAMP
);
+CREATE UNIQUE INDEX session_source_ids_unique ON session (source_scope_id, source_object_id);
CREATE TABLE session_stats (
id SERIAL PRIMARY KEY,
@@ -707,6 +724,7 @@
condor_version VARCHAR(1000),
daemon_start_time TIMESTAMP
);
+CREATE UNIQUE INDEX slot_source_ids_unique ON slot (source_scope_id, source_object_id);
CREATE TABLE slot_stats (
id SERIAL PRIMARY KEY,
@@ -775,6 +793,7 @@
tpl_data_file_size INT,
tpl_current_file_count INT
);
+CREATE UNIQUE INDEX store_source_ids_unique ON store (source_scope_id, source_object_id);
CREATE TABLE store_stats (
id SERIAL PRIMARY KEY,
@@ -808,6 +827,7 @@
name VARCHAR(1000),
schedd_name VARCHAR(1000)
);
+CREATE UNIQUE INDEX submitter_source_ids_unique ON submitter (source_scope_id, source_object_id);
CREATE TABLE submitter_stats (
id SERIAL PRIMARY KEY,
@@ -836,6 +856,7 @@
version VARCHAR(1000),
machine VARCHAR(1000)
);
+CREATE UNIQUE INDEX system_source_ids_unique ON system (source_scope_id, source_object_id);
CREATE TABLE system_stats (
id SERIAL PRIMARY KEY,
@@ -858,6 +879,7 @@
name VARCHAR(1000),
federation_tag VARCHAR(1000)
);
+CREATE UNIQUE INDEX vhost_source_ids_unique ON vhost (source_scope_id, source_object_id);
CREATE TABLE vhost_stats (
id SERIAL PRIMARY KEY,
More information about the rhmessaging-commits
mailing list