From: richard
Date: Fri, 20 Sep 2002 01:20:32 +0000 (+0000)
Subject: - verify contents of tracker module when the tracker is opened
X-Git-Url: https://git.tokkee.org/?a=commitdiff_plain;h=e8ba7d283909bef6f2b06af2bb23f687640de19a;p=roundup.git

- verify contents of tracker module when the tracker is opened
- performance improvements in *dbm and sql backends
- new benchmark module. To use:
    PYTHONPATH=. python2 test/benchmark.py
  (yes, it's a little basic at present ;)

git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1198 57a73879-2fb5-44c3-a270-3262357dd7e2
---
diff --git a/CHANGES.txt b/CHANGES.txt
index 9caa945..a6db56a 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -17,7 +17,8 @@ are given with the most recent entry first.
 - we now verify instance attributes on instance open and throw a useful
   error if they're not all there
 - sf 611217 ] menu() has problems when labelprop==None
-
+- verify contents of tracker module when the tracker is opened
+- performance improvements in *dbm and sql backends
 
 2002-09-13 0.5.0 beta2
  . all backends now have a .close() method, and it's used everywhere
diff --git a/TODO.txt b/TODO.txt
index 6ede22a..120e2e6 100644
--- a/TODO.txt
+++ b/TODO.txt
@@ -53,5 +53,6 @@
 pending web       allow multilink selections to select a "none" element to allow
 bug     mailgw    some f*ked mailers QUOTE their Re; "Re: "[issue1] bla blah""
 bug     docs      need to mention somewhere how sorting works
 bug     web       :multilink isn't working
+bug     docs      mention not putting spaces in tracker URL aliases
 ======= ========= =============================================================
diff --git a/roundup/backends/back_anydbm.py b/roundup/backends/back_anydbm.py
index ef24fd1..ef047ba 100644
--- a/roundup/backends/back_anydbm.py
+++ b/roundup/backends/back_anydbm.py
@@ -15,7 +15,7 @@
 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
-#$Id: back_anydbm.py,v 1.81 2002-09-19 02:37:41 richard Exp $
+#$Id: back_anydbm.py,v 1.82 2002-09-20 01:20:31 richard Exp $
 '''
 This module defines a backend that saves the hyperdatabase in a database
 chosen by anydbm.
It is guaranteed to always be available in python @@ -236,6 +236,13 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' if __debug__: print >>hyperdb.DEBUG, 'addnode', (self, classname, nodeid, node) + + # add in the "calculated" properties (dupe so we don't affect + # calling code's node assumptions) + node = node.copy() + node['creator'] = self.journaltag + node['creation'] = node['activity'] = date.Date() + self.newnodes.setdefault(classname, {})[nodeid] = 1 self.cache.setdefault(classname, {})[nodeid] = node self.savenode(classname, nodeid, node) @@ -247,6 +254,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, node) self.dirtynodes.setdefault(classname, {})[nodeid] = 1 + # update the activity time (dupe so we don't affect + # calling code's node assumptions) + node = node.copy() + node['activity'] = date.Date() + # can't set without having already loaded the node self.cache[classname][nodeid] = node self.savenode(classname, nodeid, node) @@ -975,7 +987,13 @@ class Class(hyperdb.Class): if propname == 'id': return nodeid + # get the node's dict + d = self.db.getnode(self.classname, nodeid, cache=cache) + + # check for one of the special props if propname == 'creation': + if d.has_key('creation'): + return d['creation'] if not self.do_journal: raise ValueError, 'Journalling is disabled for this class' journal = self.db.getjournal(self.classname, nodeid) @@ -985,6 +1003,8 @@ class Class(hyperdb.Class): # on the strange chance that there's no journal return date.Date() if propname == 'activity': + if d.has_key('activity'): + return d['activity'] if not self.do_journal: raise ValueError, 'Journalling is disabled for this class' journal = self.db.getjournal(self.classname, nodeid) @@ -994,6 +1014,8 @@ class Class(hyperdb.Class): # on the strange chance that there's no journal return date.Date() if propname == 'creator': + if d.has_key('creator'): + return d['creator'] if not self.do_journal: raise ValueError, 'Journalling is disabled for this class' journal = self.db.getjournal(self.classname, nodeid) @@ -1005,9 +1027,6 @@ class Class(hyperdb.Class): # get the property (raises KeyErorr if invalid) prop = self.properties[propname] - # get the node's dict - d = self.db.getnode(self.classname, nodeid, cache=cache) - if not d.has_key(propname): if default is _marker: if isinstance(prop, Multilink): @@ -1103,7 +1122,11 @@ class Class(hyperdb.Class): # this will raise the KeyError if the property isn't valid # ... we don't use getprops() here because we only care about # the writeable properties. 
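# A minimal sketch of the 'creation'/'activity'/'creator' handling added
# above, for reference: new nodes get the values stamped straight into the
# node dict, and get() only falls back to the journal for nodes written
# before this change. The class and attribute names below are illustrative
# only (not roundup API); it assumes the roundup package is importable for
# date.Date(), and that journal entries look like (nodeid, date, user, ...).
from roundup import date

class CalculatedPropsStore:
    def __init__(self, journaltag):
        self.journaltag = journaltag
        self.nodes = {}     # nodeid -> property dict
        self.journals = {}  # nodeid -> list of (nodeid, date, user, ...) entries

    def add(self, nodeid, node):
        # dupe the dict so the caller's node is untouched, as addnode() does
        node = node.copy()
        node['creator'] = self.journaltag
        node['creation'] = node['activity'] = date.Date()
        self.nodes[nodeid] = node

    def get_special(self, nodeid, propname):
        d = self.nodes[nodeid]
        if d.has_key(propname):
            # fast path: the value was stamped in at add/set time
            return d[propname]
        # slow path for pre-existing nodes: derive it from the journal,
        # which is what the code did on every access before this change
        journal = self.journals.get(nodeid, [])
        if not journal:
            return date.Date()
        if propname == 'creation':
            return journal[0][1]
        if propname == 'activity':
            return journal[-1][1]
        if propname == 'creator':
            return journal[0][2]
        raise KeyError, propname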
- prop = self.properties[propname] + try: + prop = self.properties[propname] + except KeyError: + raise KeyError, '"%s" has no property named "%s"'%( + self.classname, propname) # if the value's the same as the existing value, no sense in # doing anything @@ -1388,7 +1411,8 @@ class Class(hyperdb.Class): return nodeid finally: cldb.close() - raise KeyError, keyvalue + raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, + keyvalue, self.classname) # change from spec - allows multiple props to match def find(self, **propspec): diff --git a/roundup/backends/back_gadfly.py b/roundup/backends/back_gadfly.py index a4e8a76..76db52e 100644 --- a/roundup/backends/back_gadfly.py +++ b/roundup/backends/back_gadfly.py @@ -1,4 +1,4 @@ -# $Id: back_gadfly.py,v 1.23 2002-09-19 02:37:41 richard Exp $ +# $Id: back_gadfly.py,v 1.24 2002-09-20 01:20:31 richard Exp $ __doc__ = ''' About Gadfly ============ @@ -154,7 +154,7 @@ class GadflyClass: tn = '%s_%s'%(cn, k) frum.append(tn) if isinstance(v, type([])): - s = ','.join([self.arg for x in v]) + s = ','.join([a for x in v]) where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s)) args = args + v else: @@ -176,66 +176,50 @@ class GadflyClass: where.append('id in (%s)'%s) args = args + v - # figure the order by clause + # "grouping" is just the first-order sorting in the SQL fetch + # can modify it...) orderby = [] ordercols = [] + if group[0] is not None and group[1] is not None: + if group[0] != '-': + orderby.append('_'+group[1]) + ordercols.append('_'+group[1]) + else: + orderby.append('_'+group[1]+' desc') + ordercols.append('_'+group[1]) + + # now add in the sorting + group = '' if sort[0] is not None and sort[1] is not None: direction, colname = sort if direction != '-': - if colname == 'activity': - orderby.append('activity') - ordercols.append('max(%s__journal.date) as activity'%cn) - frum.append('%s__journal'%cn) - where.append('%s__journal.nodeid = _%s.id'%(cn, cn)) - elif colname == 'id': + if colname == 'id': orderby.append(colname) - ordercols.append(colname) else: orderby.append('_'+colname) ordercols.append('_'+colname) else: - if colname == 'activity': - orderby.append('activity desc') - ordercols.append('max(%s__journal.date) as activity'%cn) - frum.append('%s__journal'%cn) - where.append('%s__journal.nodeid = _%s.id'%(cn, cn)) - elif colname == 'id': + if colname == 'id': orderby.append(colname+' desc') ordercols.append(colname) else: orderby.append('_'+colname+' desc') ordercols.append('_'+colname) - # figure the group by clause - groupby = [] - groupcols = [] - if group[0] is not None and group[1] is not None: - if group[0] != '-': - groupby.append('_'+group[1]) - groupcols.append('_'+group[1]) - else: - groupby.append('_'+group[1]+' desc') - groupcols.append('_'+group[1]) - # construct the SQL frum = ','.join(frum) - where = ' and '.join(where) - cols = [] + if where: + where = ' where ' + (' and '.join(where)) + else: + where = '' + cols = ['id'] if orderby: cols = cols + ordercols order = ' order by %s'%(','.join(orderby)) else: order = '' - if 0: #groupby: - cols = cols + groupcols - group = ' group by %s'%(','.join(groupby)) - else: - group = '' - if 'id' not in cols: - cols.append('id') cols = ','.join(cols) - sql = 'select %s from %s where %s%s%s'%(cols, frum, where, order, - group) + sql = 'select %s from %s %s%s%s'%(cols, frum, where, group, order) args = tuple(args) if __debug__: print >>hyperdb.DEBUG, 'filter', (self, sql, args) diff --git a/roundup/backends/rdbms_common.py b/roundup/backends/rdbms_common.py index 
d90653f..93d3e8b 100644 --- a/roundup/backends/rdbms_common.py +++ b/roundup/backends/rdbms_common.py @@ -1,4 +1,4 @@ -# $Id: rdbms_common.py,v 1.6 2002-09-19 05:30:25 richard Exp $ +# $Id: rdbms_common.py,v 1.7 2002-09-20 01:20:32 richard Exp $ # standard python modules import sys, os, time, re, errno, weakref, copy @@ -23,9 +23,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): the sql_* methods that are NotImplemented - we keep a cache of the latest ROW_CACHE_SIZE row fetches. ''' - # flag to set on retired entries - RETIRED_FLAG = '__hyperdb_retired' - def __init__(self, config, journaltag=None): ''' Open the database and load the schema from it. ''' @@ -130,7 +127,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): "properties" is a list of (name, prop) where prop may be an instance of a hyperdb "type" _or_ a string repr of that type. ''' - cols = [] + cols = ['_activity', '_creator', '_creation'] mls = [] # add the multilinks separately for col, prop in properties: @@ -461,6 +458,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): cl = self.classes[classname] cols, mls = self.determine_columns(cl.properties.items()) + # add the special props + node = node.copy() + node['creation'] = node['activity'] = date.Date() + node['creator'] = self.journaltag + # default the non-multilink columns for col, prop in cl.properties.items(): if not isinstance(col, Multilink): @@ -499,11 +501,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # make sure we do the commit-time extra stuff for this node self.transactions.append((self.doSaveNode, (classname, nodeid, node))) - def setnode(self, classname, nodeid, node, multilink_changes): + def setnode(self, classname, nodeid, values, multilink_changes): ''' Change the specified node. 
''' if __debug__: - print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, node) + print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, values) # clear this node out of the cache if it's in there key = (classname, nodeid) @@ -511,31 +513,40 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): del self.cache[key] self.cache_lru.remove(key) - node = self.serialise(classname, node) + # add the special props + values = values.copy() + values['activity'] = date.Date() + + # make db-friendly + values = self.serialise(classname, values) cl = self.classes[classname] cols = [] mls = [] # add the multilinks separately - for col in node.keys(): - prop = cl.properties[col] + props = cl.getprops() + for col in values.keys(): + prop = props[col] if isinstance(prop, Multilink): mls.append(col) else: cols.append('_'+col) cols.sort() - # make sure the ordering is correct for column name -> column value - vals = tuple([node[col[1:]] for col in cols]) - s = ','.join(['%s=%s'%(x, self.arg) for x in cols]) - cols = ','.join(cols) - - # perform the update cursor = self.conn.cursor() - sql = 'update _%s set %s'%(classname, s) - if __debug__: - print >>hyperdb.DEBUG, 'setnode', (self, sql, vals) - cursor.execute(sql, vals) + + # if there's any updates to regular columns, do them + if cols: + # make sure the ordering is correct for column name -> column value + sqlvals = tuple([values[col[1:]] for col in cols]) + (nodeid,) + s = ','.join(['%s=%s'%(x, self.arg) for x in cols]) + cols = ','.join(cols) + + # perform the update + sql = 'update _%s set %s where id=%s'%(classname, s, self.arg) + if __debug__: + print >>hyperdb.DEBUG, 'setnode', (self, sql, sqlvals) + cursor.execute(sql, sqlvals) # now the fun bit, updating the multilinks ;) for col, (add, remove) in multilink_changes.items(): @@ -552,7 +563,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): self.sql(cursor, sql, (nodeid, removeid)) # make sure we do the commit-time extra stuff for this node - self.transactions.append((self.doSaveNode, (classname, nodeid, node))) + self.transactions.append((self.doSaveNode, (classname, nodeid, values))) def getnode(self, classname, nodeid): ''' Get a node from the database. 
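# The UPDATE that setnode() now builds above only touches the columns that
# were actually passed in, and binds the values separately from the SQL
# text. A stand-alone sketch of the same idea (build_update is a
# hypothetical helper, not a roundup function; arg is the driver's
# parameter marker, '%s' here):
def build_update(classname, nodeid, values, arg='%s'):
    '''Return (sql, args) for a partial update of a class table.'''
    cols = ['_' + col for col in values.keys()]
    cols.sort()
    assignments = ','.join(['%s=%s' % (col, arg) for col in cols])
    sql = 'update _%s set %s where id=%s' % (classname, assignments, arg)
    # bound values must follow the sorted column order; the id comes last
    args = tuple([values[col[1:]] for col in cols]) + (nodeid,)
    return sql, args

# e.g. build_update('issue', '1', {'title': 'spam', 'status': '2'}) gives
#   ('update _issue set _status=%s,_title=%s where id=%s', ('2', 'spam', '1'))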
@@ -603,9 +614,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # save off in the cache key = (classname, nodeid) self.cache[key] = node - # update the LRU - self.cache_lru.insert(0, key) - del self.cache[self.cache_lru.pop()] + # update the LRU + self.cache_lru.insert(0, key) + del self.cache[self.cache_lru.pop()] return node @@ -1171,44 +1182,28 @@ class Class(hyperdb.Class): if propname == 'id': return nodeid + # get the node's dict + d = self.db.getnode(self.classname, nodeid) + if propname == 'creation': - if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' - journal = self.db.getjournal(self.classname, nodeid) - if journal: - return self.db.getjournal(self.classname, nodeid)[0][1] + if d.has_key('creation'): + return d['creation'] else: - # on the strange chance that there's no journal return date.Date() if propname == 'activity': - if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' - journal = self.db.getjournal(self.classname, nodeid) - if journal: - return self.db.getjournal(self.classname, nodeid)[-1][1] + if d.has_key('activity'): + return d['activity'] else: - # on the strange chance that there's no journal return date.Date() if propname == 'creator': - if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' - journal = self.db.getjournal(self.classname, nodeid) - if journal: - name = self.db.getjournal(self.classname, nodeid)[0][2] + if d.has_key('creator'): + return d['creator'] else: - return None - try: - return self.db.user.lookup(name) - except KeyError: - # the journaltag user doesn't exist any more - return None + return self.db.journaltag # get the property (raises KeyErorr if invalid) prop = self.properties[propname] - # get the node's dict - d = self.db.getnode(self.classname, nodeid) #, cache=cache) - if not d.has_key(propname): if default is self._marker: if isinstance(prop, Multilink): @@ -1298,7 +1293,11 @@ class Class(hyperdb.Class): # this will raise the KeyError if the property isn't valid # ... we don't use getprops() here because we only care about # the writeable properties. - prop = self.properties[propname] + try: + prop = self.properties[propname] + except KeyError: + raise KeyError, '"%s" has no property named "%s"'%( + self.classname, propname) # if the value's the same as the existing value, no sense in # doing anything @@ -1431,14 +1430,12 @@ class Class(hyperdb.Class): except ValueError: raise TypeError, 'new property "%s" not boolean'%propname - node[propname] = value - # nothing to do? 
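# The row cache getnode() maintains above is just a dict plus a list of keys
# kept in most-recently-used order. A tiny self-contained sketch of that
# scheme (RowCache and ROW_CACHE_SIZE here are illustrative, not the actual
# rdbms_common implementation):
ROW_CACHE_SIZE = 100

class RowCache:
    def __init__(self, size=ROW_CACHE_SIZE):
        self.size = size
        self.cache = {}       # (classname, nodeid) -> node dict
        self.cache_lru = []   # cache keys, most recently used first

    def remember(self, key, node):
        self.cache[key] = node
        self.cache_lru.insert(0, key)
        if len(self.cache_lru) > self.size:
            # evict the least recently used row
            del self.cache[self.cache_lru.pop()]

    def fetch(self, key):
        node = self.cache[key]        # KeyError means "not cached"
        # move the key to the front so it survives eviction longer
        self.cache_lru.remove(key)
        self.cache_lru.insert(0, key)
        return node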
if not propvalues: return propvalues # do the set, and journal it - self.db.setnode(self.classname, nodeid, node, multilink_changes) + self.db.setnode(self.classname, nodeid, propvalues, multilink_changes) if self.do_journal: propvalues.update(journalvalues) @@ -1575,16 +1572,15 @@ class Class(hyperdb.Class): raise TypeError, 'No key property set for class %s'%self.classname cursor = self.db.conn.cursor() - sql = 'select id from _%s where _%s=%s'%(self.classname, self.key, - self.db.arg) - if __debug__: - print >>hyperdb.DEBUG, 'lookup', (self, sql, keyvalue) - cursor.execute(sql, (keyvalue,)) + sql = 'select id,__retired__ from _%s where _%s=%s'%(self.classname, + self.key, self.db.arg) + self.db.sql(cursor, sql, (keyvalue,)) - # see if there was a result + # see if there was a result that's not retired l = cursor.fetchall() - if not l: - raise KeyError, keyvalue + if not l or int(l[0][1]): + raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, + keyvalue, self.classname) # return the id return l[0][0] @@ -1665,7 +1661,7 @@ class Class(hyperdb.Class): tn = '%s_%s'%(cn, k) frum.append(tn) if isinstance(v, type([])): - s = ','.join([self.arg for x in v]) + s = ','.join([a for x in v]) where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s)) args = args + v else: @@ -1733,27 +1729,13 @@ class Class(hyperdb.Class): if sort[0] is not None and sort[1] is not None: direction, colname = sort if direction != '-': - if colname == 'activity': - orderby.append('activity') - ordercols.append('max(%s__journal.date) as activity'%cn) - frum.append('%s__journal'%cn) - where.append('%s__journal.nodeid = _%s.id'%(cn, cn)) - # we need to group by id - group = ' group by id' - elif colname == 'id': + if colname == 'id': orderby.append(colname) else: orderby.append('_'+colname) ordercols.append('_'+colname) else: - if colname == 'activity': - orderby.append('activity desc') - ordercols.append('max(%s__journal.date) as activity'%cn) - frum.append('%s__journal'%cn) - where.append('%s__journal.nodeid = _%s.id'%(cn, cn)) - # we need to group by id - group = ' group by id' - elif colname == 'id': + if colname == 'id': orderby.append(colname+' desc') ordercols.append(colname) else: diff --git a/roundup/instance.py b/roundup/instance.py index dc1cc02..b873340 100644 --- a/roundup/instance.py +++ b/roundup/instance.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: instance.py,v 1.8 2002-09-18 00:02:13 richard Exp $ +# $Id: instance.py,v 1.9 2002-09-20 01:20:31 richard Exp $ __doc__ = ''' Tracker handling (open tracker). @@ -39,8 +39,16 @@ class Opener: Raise ValueError if the tracker home doesn't exist. 
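# lookup() above now fetches the __retired__ flag in the same query rather
# than filtering retired nodes afterwards. The shape of that query, as a
# sketch (lookup_active is a hypothetical helper taking any DB-API cursor;
# roundup builds the real SQL per class):
def lookup_active(cursor, classname, keyprop, keyvalue, arg='%s'):
    '''Return the id for keyvalue, raising KeyError if absent or retired.'''
    sql = 'select id,__retired__ from _%s where _%s=%s' % (
        classname, keyprop, arg)
    cursor.execute(sql, (keyvalue,))
    rows = cursor.fetchall()
    if not rows or int(rows[0][1]):
        raise KeyError, 'No key (%s) value "%s" for "%s"' % (
            keyprop, keyvalue, classname)
    return rows[0][0]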
''' + # sanity check existence of tracker home if not os.path.exists(tracker_home): raise ValueError, 'no such directory: "%s"'%tracker_home + + # sanity check tracker home contents + for reqd in 'config dbinit select_db interfaces'.split(): + if not os.path.exists(os.path.join(tracker_home, '%s.py'%reqd)): + raise TrackerError, 'File "%s.py" missing from tracker '\ + 'home "%s"'%(reqd, tracker_home) + if self.trackers.has_key(tracker_home): return imp.load_package(self.trackers[tracker_home], tracker_home) @@ -54,8 +62,8 @@ class Opener: # ensure the tracker has all the required bits for required in 'config open init Client MailGW'.split(): if not hasattr(tracker, required): - raise TrackerError, 'Required tracker attribute "%s" '\ - 'missing'%required + raise TrackerError, \ + 'Required tracker attribute "%s" missing'%required return tracker diff --git a/roundup/roundupdb.py b/roundup/roundupdb.py index 4ad3eb9..e1f2b52 100644 --- a/roundup/roundupdb.py +++ b/roundup/roundupdb.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: roundupdb.py,v 1.68 2002-09-11 02:20:35 richard Exp $ +# $Id: roundupdb.py,v 1.69 2002-09-20 01:20:31 richard Exp $ __doc__ = """ Extending hyperdb with types specific to issue-tracking. @@ -358,7 +358,10 @@ class IssueClass: # determine what changed for key in oldvalues.keys(): - if key in ['files','messages']: continue + if key in ['files','messages']: + continue + if key in ('activity', 'creator', 'creation'): + continue new_value = cl.get(nodeid, key) # the old value might be non existent try: diff --git a/test/benchmark.py b/test/benchmark.py new file mode 100644 index 0000000..496ce65 --- /dev/null +++ b/test/benchmark.py @@ -0,0 +1,123 @@ +import sys, os, time, shutil + +from roundup.hyperdb import String, Password, Link, Multilink, Date, \ + Interval, DatabaseError, Boolean, Number +from roundup import date, password +from roundup.indexer import Indexer + +def setupSchema(db, module): + status = module.Class(db, "status", name=String()) + status.setkey("name") + user = module.Class(db, "user", username=String(), password=Password(), + assignable=Boolean(), age=Number(), roles=String()) + user.setkey("username") + file = module.FileClass(db, "file", name=String(), type=String(), + comment=String(indexme="yes")) + issue = module.IssueClass(db, "issue", title=String(indexme="yes"), + status=Link("status"), nosy=Multilink("user"), deadline=Date(), + foo=Interval(), files=Multilink("file"), assignedto=Link('user')) + session = module.Class(db, 'session', title=String()) + session.disableJournalling() + db.post_init() + status.create(name="unread") + status.create(name="in-progress") + status.create(name="testing") + status.create(name="resolved") + user.create(username='one') + user.create(username='two') + db.commit() + +class config: + DATABASE='_test_dir' + GADFLY_DATABASE = ('test', DATABASE) + MAILHOST = 'localhost' + MAIL_DOMAIN = 'fill.me.in.' 
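# The tracker verification added in instance.py above amounts to two checks:
# the required source files exist in the tracker home, and the imported
# package exposes the required attributes. A stand-alone sketch of the same
# checks (check_tracker_home is a hypothetical helper, and it raises
# ValueError to stay self-contained where roundup raises TrackerError):
import os

REQUIRED_FILES = 'config dbinit select_db interfaces'.split()
REQUIRED_ATTRS = 'config open init Client MailGW'.split()

def check_tracker_home(tracker_home, tracker=None):
    '''Raise ValueError if the tracker home looks incomplete.'''
    if not os.path.exists(tracker_home):
        raise ValueError, 'no such directory: "%s"' % tracker_home
    for reqd in REQUIRED_FILES:
        if not os.path.exists(os.path.join(tracker_home, '%s.py' % reqd)):
            raise ValueError, 'File "%s.py" missing from tracker home "%s"' % (
                reqd, tracker_home)
    if tracker is not None:
        for required in REQUIRED_ATTRS:
            if not hasattr(tracker, required):
                raise ValueError, \
                    'Required tracker attribute "%s" missing' % required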
+ TRACKER_NAME = 'Roundup issue tracker' + TRACKER_EMAIL = 'issue_tracker@%s'%MAIL_DOMAIN + TRACKER_WEB = 'http://some.useful.url/' + ADMIN_EMAIL = 'roundup-admin@%s'%MAIL_DOMAIN + FILTER_POSITION = 'bottom' # one of 'top', 'bottom', 'top and bottom' + ANONYMOUS_ACCESS = 'deny' # either 'deny' or 'allow' + ANONYMOUS_REGISTER = 'deny' # either 'deny' or 'allow' + MESSAGES_TO_AUTHOR = 'no' # either 'yes' or 'no' + EMAIL_SIGNATURE_POSITION = 'bottom' + +def main(backendname, time=time.time, numissues=10): + try: + exec('from roundup.backends import %s as backend'%backendname) + except ImportError: + return + + if os.path.exists(config.DATABASE): + shutil.rmtree(config.DATABASE) + + times = [] + db = backend.Database(config, 'test') + setupSchema(db, backend) + + # create a whole bunch of stuff + for i in range(numissues): + db.issue.create(**{'title': 'issue %s'%i}) + for j in range(10): + db.issue.set(str(i+1), status='2', assignedto='2', nosy=[]) + db.issue.set(str(i+1), status='1', assignedto='1', nosy=['1','2']) + db.user.create(**{'username': 'user %s'%i}) + for j in range(10): + db.user.set(str(i+1), assignable=1) + db.user.set(str(i+1), assignable=0) + db.commit() + sys.stdout.write('%7s: %-6d'%(backendname, numissues)) + sys.stdout.flush() + + times.append(('start', time())) + + # fetch + for i in db.issue.list(): + db.issue.get(i, 'title') + times.append(('fetch', time())) + + # journals + for i in db.issue.list(): + db.issue.history(i) + times.append(('journal', time())) + + # "calculated" props + for i in db.issue.list(): + db.issue.get(i, 'activity') + db.issue.get(i, 'creator') + db.issue.get(i, 'creation') + times.append(('jprops', time())) + + # lookup + for i in range(numissues): + db.user.lookup('user %s'%i) + times.append(('lookup', time())) + + # filter + for i in range(100): + db.issue.filter(None, {'nosy': ['1'], 'assignedto': '1', + 'title':'issue'}, ('+', 'activity'), ('+', 'status')) + times.append(('filter', time())) + + # results + last = None + for event, stamp in times: + if last is None: + first = stamp + else: + sys.stdout.write(' %-6.2f'%(stamp-last)) + last = stamp + print ' %-6.2f'%(last-first) + sys.stdout.flush() + +if __name__ == '__main__': + # 0 1 2 3 4 5 6 + # 01234567890123456789012345678901234567890123456789012345678901234 + print 'Test name fetch journl jprops lookup filter TOTAL ' + for name in 'anydbm bsddb bsddb3 metakit sqlite'.split(): + main(name) + for name in 'anydbm bsddb bsddb3 metakit sqlite'.split(): + main(name, numissues=20) +# for name in 'anydbm bsddb bsddb3 metakit sqlite'.split(): +# main(name, numissues=100) + diff --git a/test/test_db.py b/test/test_db.py index c9226d2..00e67e7 100644 --- a/test/test_db.py +++ b/test/test_db.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
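# Reading the benchmark output above: each run prints one row per backend
# with per-phase wall-clock seconds (fetch, journal, "calculated" props,
# key lookup, filter) followed by the total for that issue count. A single
# backend can also be timed on its own, for example (assuming the current
# directory is the roundup source root; the backend name and issue count
# here are just example arguments):
import sys
sys.path.insert(0, '.')       # make the roundup package importable
sys.path.insert(0, 'test')    # make test/benchmark.py importable
import benchmark
benchmark.main('anydbm', numissues=50)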
# -# $Id: test_db.py,v 1.50 2002-09-19 02:37:41 richard Exp $ +# $Id: test_db.py,v 1.51 2002-09-20 01:20:32 richard Exp $ import unittest, os, shutil, time @@ -158,6 +158,14 @@ class anydbmDBTestCase(MyTestCase): self.db.user.set('1', age=None) self.assertEqual(self.db.user.get('1', "age"), None) + def testKeyValue(self): + newid = self.db.user.create(username="spam") + self.assertEqual(self.db.user.lookup('spam'), newid) + self.db.commit() + self.assertEqual(self.db.user.lookup('spam'), newid) + self.db.user.retire(newid) + self.assertRaises(KeyError, self.db.user.lookup, 'spam') + def testNewProperty(self): self.db.issue.create(title="spam", status='1') self.db.issue.addprop(fixer=Link("user")) @@ -712,7 +720,7 @@ def suite(): unittest.makeSuite(anydbmDBTestCase, 'test'), unittest.makeSuite(anydbmReadOnlyDBTestCase, 'test') ] - #return unittest.TestSuite(l) +# return unittest.TestSuite(l) try: import sqlite @@ -720,6 +728,7 @@ def suite(): l.append(unittest.makeSuite(sqliteReadOnlyDBTestCase, 'test')) except: print 'sqlite module not found, skipping gadfly DBTestCase' +# return unittest.TestSuite(l) try: import gadfly