summary | shortlog | log | commit | commitdiff | tree
raw | patch | inline | side by side (parent: 87f87b4)
raw | patch | inline | side by side (parent: 87f87b4)
author | richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2> | |
Fri, 20 Sep 2002 01:20:32 +0000 (01:20 +0000) | ||
committer | richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2> | |
Fri, 20 Sep 2002 01:20:32 +0000 (01:20 +0000) |
- performance improvements in *dbm and sql backends
- new benchmark module. To use:
PYTHONPATH=. python2 test/benchmark.py
(yes, it's a little basic at present ;)
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1198 57a73879-2fb5-44c3-a270-3262357dd7e2
- new benchmark module. To use:
PYTHONPATH=. python2 test/benchmark.py
(yes, it's a little basic at present ;)
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1198 57a73879-2fb5-44c3-a270-3262357dd7e2
CHANGES.txt | patch | blob | history | |
TODO.txt | patch | blob | history | |
roundup/backends/back_anydbm.py | patch | blob | history | |
roundup/backends/back_gadfly.py | patch | blob | history | |
roundup/backends/rdbms_common.py | patch | blob | history | |
roundup/instance.py | patch | blob | history | |
roundup/roundupdb.py | patch | blob | history | |
test/benchmark.py | [new file with mode: 0644] | patch | blob |
test/test_db.py | patch | blob | history |
diff --git a/CHANGES.txt b/CHANGES.txt
index 9caa9457fc7258aeebe5aba037e2fd2339b40094..a6db56a057557dc98b614da4f080aa08f7bbf0df 100644 (file)
--- a/CHANGES.txt
+++ b/CHANGES.txt
- we now verify instance attributes on instance open and throw a useful error
if they're not all there
- sf [ 611217 ] menu() has problems when labelprop==None
-
+- verify contents of tracker module when the tracker is opened
+- performance improvements in *dbm and sq backends
2002-09-13 0.5.0 beta2
. all backends now have a .close() method, and it's used everywhere
diff --git a/TODO.txt b/TODO.txt
index 6ede22a4394441ea0ebc4d62b095ba0965067ce6..120e2e638aa541cfccd793b591054130c7c8020a 100644 (file)
--- a/TODO.txt
+++ b/TODO.txt
bug mailgw some f*ked mailers QUOTE their Re; "Re: "[issue1] bla blah""
bug docs need to mention somewhere how sorting works
bug web :multilink isn't working
+bug docs mention not putting spaces in tracker URL aliases
======= ========= =============================================================
index ef24fd1ca1e63d8dae23f3c428c06c1996c0286f..ef047bac41a21cf2a3000a891adb99a4fb6c037f 100644 (file)
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-#$Id: back_anydbm.py,v 1.81 2002-09-19 02:37:41 richard Exp $
+#$Id: back_anydbm.py,v 1.82 2002-09-20 01:20:31 richard Exp $
'''
This module defines a backend that saves the hyperdatabase in a database
chosen by anydbm. It is guaranteed to always be available in python
'''
if __debug__:
print >>hyperdb.DEBUG, 'addnode', (self, classname, nodeid, node)
+
+ # add in the "calculated" properties (dupe so we don't affect
+ # calling code's node assumptions)
+ node = node.copy()
+ node['creator'] = self.journaltag
+ node['creation'] = node['activity'] = date.Date()
+
self.newnodes.setdefault(classname, {})[nodeid] = 1
self.cache.setdefault(classname, {})[nodeid] = node
self.savenode(classname, nodeid, node)
print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, node)
self.dirtynodes.setdefault(classname, {})[nodeid] = 1
+ # update the activity time (dupe so we don't affect
+ # calling code's node assumptions)
+ node = node.copy()
+ node['activity'] = date.Date()
+
# can't set without having already loaded the node
self.cache[classname][nodeid] = node
self.savenode(classname, nodeid, node)
if propname == 'id':
return nodeid
+ # get the node's dict
+ d = self.db.getnode(self.classname, nodeid, cache=cache)
+
+ # check for one of the special props
if propname == 'creation':
+ if d.has_key('creation'):
+ return d['creation']
if not self.do_journal:
raise ValueError, 'Journalling is disabled for this class'
journal = self.db.getjournal(self.classname, nodeid)
# on the strange chance that there's no journal
return date.Date()
if propname == 'activity':
+ if d.has_key('activity'):
+ return d['activity']
if not self.do_journal:
raise ValueError, 'Journalling is disabled for this class'
journal = self.db.getjournal(self.classname, nodeid)
# on the strange chance that there's no journal
return date.Date()
if propname == 'creator':
+ if d.has_key('creator'):
+ return d['creator']
if not self.do_journal:
raise ValueError, 'Journalling is disabled for this class'
journal = self.db.getjournal(self.classname, nodeid)
# get the property (raises KeyErorr if invalid)
prop = self.properties[propname]
- # get the node's dict
- d = self.db.getnode(self.classname, nodeid, cache=cache)
-
if not d.has_key(propname):
if default is _marker:
if isinstance(prop, Multilink):
# this will raise the KeyError if the property isn't valid
# ... we don't use getprops() here because we only care about
# the writeable properties.
- prop = self.properties[propname]
+ try:
+ prop = self.properties[propname]
+ except KeyError:
+ raise KeyError, '"%s" has no property named "%s"'%(
+ self.classname, propname)
# if the value's the same as the existing value, no sense in
# doing anything
return nodeid
finally:
cldb.close()
- raise KeyError, keyvalue
+ raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key,
+ keyvalue, self.classname)
# change from spec - allows multiple props to match
def find(self, **propspec):
index a4e8a76127bfed5736578975753643dc8ec4ce7c..76db52e48c0095d687755ef1ff6941fa74fc7f7c 100644 (file)
-# $Id: back_gadfly.py,v 1.23 2002-09-19 02:37:41 richard Exp $
+# $Id: back_gadfly.py,v 1.24 2002-09-20 01:20:31 richard Exp $
__doc__ = '''
About Gadfly
============
tn = '%s_%s'%(cn, k)
frum.append(tn)
if isinstance(v, type([])):
- s = ','.join([self.arg for x in v])
+ s = ','.join([a for x in v])
where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s))
args = args + v
else:
where.append('id in (%s)'%s)
args = args + v
- # figure the order by clause
+ # "grouping" is just the first-order sorting in the SQL fetch
+ # can modify it...)
orderby = []
ordercols = []
+ if group[0] is not None and group[1] is not None:
+ if group[0] != '-':
+ orderby.append('_'+group[1])
+ ordercols.append('_'+group[1])
+ else:
+ orderby.append('_'+group[1]+' desc')
+ ordercols.append('_'+group[1])
+
+ # now add in the sorting
+ group = ''
if sort[0] is not None and sort[1] is not None:
direction, colname = sort
if direction != '-':
- if colname == 'activity':
- orderby.append('activity')
- ordercols.append('max(%s__journal.date) as activity'%cn)
- frum.append('%s__journal'%cn)
- where.append('%s__journal.nodeid = _%s.id'%(cn, cn))
- elif colname == 'id':
+ if colname == 'id':
orderby.append(colname)
- ordercols.append(colname)
else:
orderby.append('_'+colname)
ordercols.append('_'+colname)
else:
- if colname == 'activity':
- orderby.append('activity desc')
- ordercols.append('max(%s__journal.date) as activity'%cn)
- frum.append('%s__journal'%cn)
- where.append('%s__journal.nodeid = _%s.id'%(cn, cn))
- elif colname == 'id':
+ if colname == 'id':
orderby.append(colname+' desc')
ordercols.append(colname)
else:
orderby.append('_'+colname+' desc')
ordercols.append('_'+colname)
- # figure the group by clause
- groupby = []
- groupcols = []
- if group[0] is not None and group[1] is not None:
- if group[0] != '-':
- groupby.append('_'+group[1])
- groupcols.append('_'+group[1])
- else:
- groupby.append('_'+group[1]+' desc')
- groupcols.append('_'+group[1])
-
# construct the SQL
frum = ','.join(frum)
- where = ' and '.join(where)
- cols = []
+ if where:
+ where = ' where ' + (' and '.join(where))
+ else:
+ where = ''
+ cols = ['id']
if orderby:
cols = cols + ordercols
order = ' order by %s'%(','.join(orderby))
else:
order = ''
- if 0: #groupby:
- cols = cols + groupcols
- group = ' group by %s'%(','.join(groupby))
- else:
- group = ''
- if 'id' not in cols:
- cols.append('id')
cols = ','.join(cols)
- sql = 'select %s from %s where %s%s%s'%(cols, frum, where, order,
- group)
+ sql = 'select %s from %s %s%s%s'%(cols, frum, where, group, order)
args = tuple(args)
if __debug__:
print >>hyperdb.DEBUG, 'filter', (self, sql, args)
index d90653fed69f86bc2f2f1a55e36ea5aa1f3f7fc6..93d3e8bc463a26455f5406d26aafcfd482071c1d 100644 (file)
-# $Id: rdbms_common.py,v 1.6 2002-09-19 05:30:25 richard Exp $
+# $Id: rdbms_common.py,v 1.7 2002-09-20 01:20:32 richard Exp $
# standard python modules
import sys, os, time, re, errno, weakref, copy
the sql_* methods that are NotImplemented
- we keep a cache of the latest ROW_CACHE_SIZE row fetches.
'''
- # flag to set on retired entries
- RETIRED_FLAG = '__hyperdb_retired'
-
def __init__(self, config, journaltag=None):
''' Open the database and load the schema from it.
'''
"properties" is a list of (name, prop) where prop may be an
instance of a hyperdb "type" _or_ a string repr of that type.
'''
- cols = []
+ cols = ['_activity', '_creator', '_creation']
mls = []
# add the multilinks separately
for col, prop in properties:
cl = self.classes[classname]
cols, mls = self.determine_columns(cl.properties.items())
+ # add the special props
+ node = node.copy()
+ node['creation'] = node['activity'] = date.Date()
+ node['creator'] = self.journaltag
+
# default the non-multilink columns
for col, prop in cl.properties.items():
if not isinstance(col, Multilink):
# make sure we do the commit-time extra stuff for this node
self.transactions.append((self.doSaveNode, (classname, nodeid, node)))
- def setnode(self, classname, nodeid, node, multilink_changes):
+ def setnode(self, classname, nodeid, values, multilink_changes):
''' Change the specified node.
'''
if __debug__:
- print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, node)
+ print >>hyperdb.DEBUG, 'setnode', (self, classname, nodeid, values)
# clear this node out of the cache if it's in there
key = (classname, nodeid)
del self.cache[key]
self.cache_lru.remove(key)
- node = self.serialise(classname, node)
+ # add the special props
+ values = values.copy()
+ values['activity'] = date.Date()
+
+ # make db-friendly
+ values = self.serialise(classname, values)
cl = self.classes[classname]
cols = []
mls = []
# add the multilinks separately
- for col in node.keys():
- prop = cl.properties[col]
+ props = cl.getprops()
+ for col in values.keys():
+ prop = props[col]
if isinstance(prop, Multilink):
mls.append(col)
else:
cols.append('_'+col)
cols.sort()
- # make sure the ordering is correct for column name -> column value
- vals = tuple([node[col[1:]] for col in cols])
- s = ','.join(['%s=%s'%(x, self.arg) for x in cols])
- cols = ','.join(cols)
-
- # perform the update
cursor = self.conn.cursor()
- sql = 'update _%s set %s'%(classname, s)
- if __debug__:
- print >>hyperdb.DEBUG, 'setnode', (self, sql, vals)
- cursor.execute(sql, vals)
+
+ # if there's any updates to regular columns, do them
+ if cols:
+ # make sure the ordering is correct for column name -> column value
+ sqlvals = tuple([values[col[1:]] for col in cols]) + (nodeid,)
+ s = ','.join(['%s=%s'%(x, self.arg) for x in cols])
+ cols = ','.join(cols)
+
+ # perform the update
+ sql = 'update _%s set %s where id=%s'%(classname, s, self.arg)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'setnode', (self, sql, sqlvals)
+ cursor.execute(sql, sqlvals)
# now the fun bit, updating the multilinks ;)
for col, (add, remove) in multilink_changes.items():
self.sql(cursor, sql, (nodeid, removeid))
# make sure we do the commit-time extra stuff for this node
- self.transactions.append((self.doSaveNode, (classname, nodeid, node)))
+ self.transactions.append((self.doSaveNode, (classname, nodeid, values)))
def getnode(self, classname, nodeid):
''' Get a node from the database.
# save off in the cache
key = (classname, nodeid)
self.cache[key] = node
- # update the LRU
- self.cache_lru.insert(0, key)
- del self.cache[self.cache_lru.pop()]
+ # update the LRU
+ self.cache_lru.insert(0, key)
+ del self.cache[self.cache_lru.pop()]
return node
if propname == 'id':
return nodeid
+ # get the node's dict
+ d = self.db.getnode(self.classname, nodeid)
+
if propname == 'creation':
- if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
- journal = self.db.getjournal(self.classname, nodeid)
- if journal:
- return self.db.getjournal(self.classname, nodeid)[0][1]
+ if d.has_key('creation'):
+ return d['creation']
else:
- # on the strange chance that there's no journal
return date.Date()
if propname == 'activity':
- if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
- journal = self.db.getjournal(self.classname, nodeid)
- if journal:
- return self.db.getjournal(self.classname, nodeid)[-1][1]
+ if d.has_key('activity'):
+ return d['activity']
else:
- # on the strange chance that there's no journal
return date.Date()
if propname == 'creator':
- if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
- journal = self.db.getjournal(self.classname, nodeid)
- if journal:
- name = self.db.getjournal(self.classname, nodeid)[0][2]
+ if d.has_key('creator'):
+ return d['creator']
else:
- return None
- try:
- return self.db.user.lookup(name)
- except KeyError:
- # the journaltag user doesn't exist any more
- return None
+ return self.db.journaltag
# get the property (raises KeyErorr if invalid)
prop = self.properties[propname]
- # get the node's dict
- d = self.db.getnode(self.classname, nodeid) #, cache=cache)
-
if not d.has_key(propname):
if default is self._marker:
if isinstance(prop, Multilink):
# this will raise the KeyError if the property isn't valid
# ... we don't use getprops() here because we only care about
# the writeable properties.
- prop = self.properties[propname]
+ try:
+ prop = self.properties[propname]
+ except KeyError:
+ raise KeyError, '"%s" has no property named "%s"'%(
+ self.classname, propname)
# if the value's the same as the existing value, no sense in
# doing anything
except ValueError:
raise TypeError, 'new property "%s" not boolean'%propname
- node[propname] = value
-
# nothing to do?
if not propvalues:
return propvalues
# do the set, and journal it
- self.db.setnode(self.classname, nodeid, node, multilink_changes)
+ self.db.setnode(self.classname, nodeid, propvalues, multilink_changes)
if self.do_journal:
propvalues.update(journalvalues)
raise TypeError, 'No key property set for class %s'%self.classname
cursor = self.db.conn.cursor()
- sql = 'select id from _%s where _%s=%s'%(self.classname, self.key,
- self.db.arg)
- if __debug__:
- print >>hyperdb.DEBUG, 'lookup', (self, sql, keyvalue)
- cursor.execute(sql, (keyvalue,))
+ sql = 'select id,__retired__ from _%s where _%s=%s'%(self.classname,
+ self.key, self.db.arg)
+ self.db.sql(cursor, sql, (keyvalue,))
- # see if there was a result
+ # see if there was a result that's not retired
l = cursor.fetchall()
- if not l:
- raise KeyError, keyvalue
+ if not l or int(l[0][1]):
+ raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key,
+ keyvalue, self.classname)
# return the id
return l[0][0]
tn = '%s_%s'%(cn, k)
frum.append(tn)
if isinstance(v, type([])):
- s = ','.join([self.arg for x in v])
+ s = ','.join([a for x in v])
where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s))
args = args + v
else:
if sort[0] is not None and sort[1] is not None:
direction, colname = sort
if direction != '-':
- if colname == 'activity':
- orderby.append('activity')
- ordercols.append('max(%s__journal.date) as activity'%cn)
- frum.append('%s__journal'%cn)
- where.append('%s__journal.nodeid = _%s.id'%(cn, cn))
- # we need to group by id
- group = ' group by id'
- elif colname == 'id':
+ if colname == 'id':
orderby.append(colname)
else:
orderby.append('_'+colname)
ordercols.append('_'+colname)
else:
- if colname == 'activity':
- orderby.append('activity desc')
- ordercols.append('max(%s__journal.date) as activity'%cn)
- frum.append('%s__journal'%cn)
- where.append('%s__journal.nodeid = _%s.id'%(cn, cn))
- # we need to group by id
- group = ' group by id'
- elif colname == 'id':
+ if colname == 'id':
orderby.append(colname+' desc')
ordercols.append(colname)
else:
diff --git a/roundup/instance.py b/roundup/instance.py
index dc1cc02f0f59b200813f9700ee7332ecad22a108..b87334065a9589cf2a5cc1ea4235b3e752010c0c 100644 (file)
--- a/roundup/instance.py
+++ b/roundup/instance.py
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: instance.py,v 1.8 2002-09-18 00:02:13 richard Exp $
+# $Id: instance.py,v 1.9 2002-09-20 01:20:31 richard Exp $
__doc__ = '''
Tracker handling (open tracker).
Raise ValueError if the tracker home doesn't exist.
'''
+ # sanity check existence of tracker home
if not os.path.exists(tracker_home):
raise ValueError, 'no such directory: "%s"'%tracker_home
+
+ # sanity check tracker home contents
+ for reqd in 'config dbinit select_db interfaces'.split():
+ if not os.path.exists(os.path.join(tracker_home, '%s.py'%reqd)):
+ raise TrackerError, 'File "%s.py" missing from tracker '\
+ 'home "%s"'%(reqd, tracker_home)
+
if self.trackers.has_key(tracker_home):
return imp.load_package(self.trackers[tracker_home],
tracker_home)
# ensure the tracker has all the required bits
for required in 'config open init Client MailGW'.split():
if not hasattr(tracker, required):
- raise TrackerError, 'Required tracker attribute "%s" '\
- 'missing'%required
+ raise TrackerError, \
+ 'Required tracker attribute "%s" missing'%required
return tracker
diff --git a/roundup/roundupdb.py b/roundup/roundupdb.py
index 4ad3eb952cad36a928fdbeafe826141ce7ae8a7e..e1f2b5226a79d6cc210e5847e64f6b6a25ef3153 100644 (file)
--- a/roundup/roundupdb.py
+++ b/roundup/roundupdb.py
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: roundupdb.py,v 1.68 2002-09-11 02:20:35 richard Exp $
+# $Id: roundupdb.py,v 1.69 2002-09-20 01:20:31 richard Exp $
__doc__ = """
Extending hyperdb with types specific to issue-tracking.
# determine what changed
for key in oldvalues.keys():
- if key in ['files','messages']: continue
+ if key in ['files','messages']:
+ continue
+ if key in ('activity', 'creator', 'creation'):
+ continue
new_value = cl.get(nodeid, key)
# the old value might be non existent
try:
diff --git a/test/benchmark.py b/test/benchmark.py
--- /dev/null
+++ b/test/benchmark.py
@@ -0,0 +1,123 @@
+import sys, os, time, shutil
+
+from roundup.hyperdb import String, Password, Link, Multilink, Date, \
+ Interval, DatabaseError, Boolean, Number
+from roundup import date, password
+from roundup.indexer import Indexer
+
+def setupSchema(db, module):
+ status = module.Class(db, "status", name=String())
+ status.setkey("name")
+ user = module.Class(db, "user", username=String(), password=Password(),
+ assignable=Boolean(), age=Number(), roles=String())
+ user.setkey("username")
+ file = module.FileClass(db, "file", name=String(), type=String(),
+ comment=String(indexme="yes"))
+ issue = module.IssueClass(db, "issue", title=String(indexme="yes"),
+ status=Link("status"), nosy=Multilink("user"), deadline=Date(),
+ foo=Interval(), files=Multilink("file"), assignedto=Link('user'))
+ session = module.Class(db, 'session', title=String())
+ session.disableJournalling()
+ db.post_init()
+ status.create(name="unread")
+ status.create(name="in-progress")
+ status.create(name="testing")
+ status.create(name="resolved")
+ user.create(username='one')
+ user.create(username='two')
+ db.commit()
+
+class config:
+ DATABASE='_test_dir'
+ GADFLY_DATABASE = ('test', DATABASE)
+ MAILHOST = 'localhost'
+ MAIL_DOMAIN = 'fill.me.in.'
+ TRACKER_NAME = 'Roundup issue tracker'
+ TRACKER_EMAIL = 'issue_tracker@%s'%MAIL_DOMAIN
+ TRACKER_WEB = 'http://some.useful.url/'
+ ADMIN_EMAIL = 'roundup-admin@%s'%MAIL_DOMAIN
+ FILTER_POSITION = 'bottom' # one of 'top', 'bottom', 'top and bottom'
+ ANONYMOUS_ACCESS = 'deny' # either 'deny' or 'allow'
+ ANONYMOUS_REGISTER = 'deny' # either 'deny' or 'allow'
+ MESSAGES_TO_AUTHOR = 'no' # either 'yes' or 'no'
+ EMAIL_SIGNATURE_POSITION = 'bottom'
+
+def main(backendname, time=time.time, numissues=10):
+ try:
+ exec('from roundup.backends import %s as backend'%backendname)
+ except ImportError:
+ return
+
+ if os.path.exists(config.DATABASE):
+ shutil.rmtree(config.DATABASE)
+
+ times = []
+ db = backend.Database(config, 'test')
+ setupSchema(db, backend)
+
+ # create a whole bunch of stuff
+ for i in range(numissues):
+ db.issue.create(**{'title': 'issue %s'%i})
+ for j in range(10):
+ db.issue.set(str(i+1), status='2', assignedto='2', nosy=[])
+ db.issue.set(str(i+1), status='1', assignedto='1', nosy=['1','2'])
+ db.user.create(**{'username': 'user %s'%i})
+ for j in range(10):
+ db.user.set(str(i+1), assignable=1)
+ db.user.set(str(i+1), assignable=0)
+ db.commit()
+ sys.stdout.write('%7s: %-6d'%(backendname, numissues))
+ sys.stdout.flush()
+
+ times.append(('start', time()))
+
+ # fetch
+ for i in db.issue.list():
+ db.issue.get(i, 'title')
+ times.append(('fetch', time()))
+
+ # journals
+ for i in db.issue.list():
+ db.issue.history(i)
+ times.append(('journal', time()))
+
+ # "calculated" props
+ for i in db.issue.list():
+ db.issue.get(i, 'activity')
+ db.issue.get(i, 'creator')
+ db.issue.get(i, 'creation')
+ times.append(('jprops', time()))
+
+ # lookup
+ for i in range(numissues):
+ db.user.lookup('user %s'%i)
+ times.append(('lookup', time()))
+
+ # filter
+ for i in range(100):
+ db.issue.filter(None, {'nosy': ['1'], 'assignedto': '1',
+ 'title':'issue'}, ('+', 'activity'), ('+', 'status'))
+ times.append(('filter', time()))
+
+ # results
+ last = None
+ for event, stamp in times:
+ if last is None:
+ first = stamp
+ else:
+ sys.stdout.write(' %-6.2f'%(stamp-last))
+ last = stamp
+ print ' %-6.2f'%(last-first)
+ sys.stdout.flush()
+
+if __name__ == '__main__':
+ # 0 1 2 3 4 5 6
+ # 01234567890123456789012345678901234567890123456789012345678901234
+ print 'Test name fetch journl jprops lookup filter TOTAL '
+ for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
+ main(name)
+ for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
+ main(name, numissues=20)
+# for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
+# main(name, numissues=100)
+
diff --git a/test/test_db.py b/test/test_db.py
index c9226d22eb945e1b99fb0f844c04aa75ce4d6c2b..00e67e7c8dd1e968772074ca9055a239beecf82f 100644 (file)
--- a/test/test_db.py
+++ b/test/test_db.py
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: test_db.py,v 1.50 2002-09-19 02:37:41 richard Exp $
+# $Id: test_db.py,v 1.51 2002-09-20 01:20:32 richard Exp $
import unittest, os, shutil, time
self.db.user.set('1', age=None)
self.assertEqual(self.db.user.get('1', "age"), None)
+ def testKeyValue(self):
+ newid = self.db.user.create(username="spam")
+ self.assertEqual(self.db.user.lookup('spam'), newid)
+ self.db.commit()
+ self.assertEqual(self.db.user.lookup('spam'), newid)
+ self.db.user.retire(newid)
+ self.assertRaises(KeyError, self.db.user.lookup, 'spam')
+
def testNewProperty(self):
self.db.issue.create(title="spam", status='1')
self.db.issue.addprop(fixer=Link("user"))
unittest.makeSuite(anydbmDBTestCase, 'test'),
unittest.makeSuite(anydbmReadOnlyDBTestCase, 'test')
]
- #return unittest.TestSuite(l)
+# return unittest.TestSuite(l)
try:
import sqlite
l.append(unittest.makeSuite(sqliteReadOnlyDBTestCase, 'test'))
except:
print 'sqlite module not found, skipping gadfly DBTestCase'
+# return unittest.TestSuite(l)
try:
import gadfly