From 21b3acdf5b78d378f1f9044815f69f33bd2f168f Mon Sep 17 00:00:00 2001 From: richard Date: Mon, 23 Sep 2002 06:48:35 +0000 Subject: [PATCH] Some speedups - both of the SQL backends can handle using only one cursor. Better date unserialisation too. git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1207 57a73879-2fb5-44c3-a270-3262357dd7e2 --- TODO.txt | 1 + roundup/backends/back_gadfly.py | 46 ++++---- roundup/backends/back_sqlite.py | 35 +++---- roundup/backends/rdbms_common.py | 167 +++++++++++++----------------- roundup/cgi/templating.py | 11 ++ roundup/date.py | 21 ++-- roundup/scripts/roundup_server.py | 3 +- test/test_db.py | 9 +- 8 files changed, 140 insertions(+), 153 deletions(-) diff --git a/TODO.txt b/TODO.txt index 0fbb7b9..cbcf579 100644 --- a/TODO.txt +++ b/TODO.txt @@ -52,5 +52,6 @@ pending web allow multilink selections to select a "none" element to allow bug mailgw some f*ked mailers QUOTE their Re; "Re: "[issue1] bla blah"" bug docs need to mention somewhere how sorting works +bug import import isn't setting the journal props correctly ======= ========= ============================================================= diff --git a/roundup/backends/back_gadfly.py b/roundup/backends/back_gadfly.py index 76db52e..1896c62 100644 --- a/roundup/backends/back_gadfly.py +++ b/roundup/backends/back_gadfly.py @@ -1,4 +1,4 @@ -# $Id: back_gadfly.py,v 1.24 2002-09-20 01:20:31 richard Exp $ +# $Id: back_gadfly.py,v 1.25 2002-09-23 06:48:34 richard Exp $ __doc__ = ''' About Gadfly ============ @@ -71,43 +71,42 @@ class Database(Database): self.database_schema = {} self.conn = gadfly.gadfly() self.conn.startup(*db) - cursor = self.conn.cursor() - cursor.execute('create table schema (schema varchar)') - cursor.execute('create table ids (name varchar, num integer)') + self.cursor = self.conn.cursor() + self.cursor.execute('create table schema (schema varchar)') + self.cursor.execute('create table ids (name varchar, num integer)') else: - cursor = self.conn.cursor() - cursor.execute('select schema from schema') - self.database_schema = cursor.fetchone()[0] + self.cursor = self.conn.cursor() + self.cursor.execute('select schema from schema') + self.database_schema = self.cursor.fetchone()[0] else: self.conn = gadfly.client.gfclient(*db) - self.database_schema = self.load_dbschema(cursor) + self.database_schema = self.load_dbschema() def __repr__(self): return ''%id(self) - def sql_fetchone(self, cursor): + def sql_fetchone(self): ''' Fetch a single row. If there's nothing to fetch, return None. 
''' try: - return cursor.fetchone() + return self.cursor.fetchone() except gadfly.database.error, message: if message == 'no more results': return None raise - def save_dbschema(self, cursor, schema): + def save_dbschema(self, schema): ''' Save the schema definition that the database currently implements ''' - self.sql(cursor, 'insert into schema values (?)', - (self.database_schema,)) + self.sql('insert into schema values (?)', (self.database_schema,)) - def load_dbschema(self, cursor): + def load_dbschema(self): ''' Load the schema definition that the database currently implements ''' - cursor.execute('select schema from schema') - return cursor.fetchone()[0] + self.cursor.execute('select schema from schema') + return self.cursor.fetchone()[0] - def save_journal(self, cursor, classname, cols, nodeid, journaldate, + def save_journal(self, classname, cols, nodeid, journaldate, journaltag, action, params): ''' Save the journal entry to the database ''' @@ -120,9 +119,9 @@ class Database(Database): cols) if __debug__: print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry) - cursor.execute(sql, entry) + self.cursor.execute(sql, entry) - def load_journal(self, cursor, classname, cols, nodeid): + def load_journal(self, classname, cols, nodeid): ''' Load the journal from the database ''' # now get the journal entries @@ -130,9 +129,9 @@ class Database(Database): self.arg) if __debug__: print >>hyperdb.DEBUG, 'getjournal', (self, sql, nodeid) - cursor.execute(sql, (nodeid,)) + self.cursor.execute(sql, (nodeid,)) res = [] - for nodeid, date_stamp, user, action, params in cursor.fetchall(): + for nodeid, date_stamp, user, action, params in self.cursor.fetchall(): res.append((nodeid, date.Date(date_stamp), user, action, params)) return res @@ -223,9 +222,8 @@ class GadflyClass: args = tuple(args) if __debug__: print >>hyperdb.DEBUG, 'filter', (self, sql, args) - cursor = self.db.conn.cursor() - cursor.execute(sql, args) - l = cursor.fetchall() + self.db.cursor.execute(sql, args) + l = self.db.cursor.fetchall() # return the IDs return [row[0] for row in l] diff --git a/roundup/backends/back_sqlite.py b/roundup/backends/back_sqlite.py index 965245f..e5f64b0 100644 --- a/roundup/backends/back_sqlite.py +++ b/roundup/backends/back_sqlite.py @@ -1,4 +1,4 @@ -# $Id: back_sqlite.py,v 1.3 2002-09-19 02:37:41 richard Exp $ +# $Id: back_sqlite.py,v 1.4 2002-09-23 06:48:35 richard Exp $ __doc__ = ''' See https://pysqlite.sourceforge.net/ for pysqlite info ''' @@ -15,24 +15,23 @@ class Database(Database): os.umask(0002) db = os.path.join(self.config.DATABASE, 'db') self.conn = sqlite.connect(db=db) - cursor = self.conn.cursor() + self.cursor = self.conn.cursor() try: - self.database_schema = self.load_dbschema(cursor) + self.database_schema = self.load_dbschema() except sqlite.DatabaseError, error: if str(error) != 'no such table: schema': raise self.database_schema = {} - cursor = self.conn.cursor() - cursor.execute('create table schema (schema varchar)') - cursor.execute('create table ids (name varchar, num integer)') + self.cursor.execute('create table schema (schema varchar)') + self.cursor.execute('create table ids (name varchar, num integer)') def __repr__(self): return ''%id(self) - def sql_fetchone(self, cursor): + def sql_fetchone(self): ''' Fetch a single row. If there's nothing to fetch, return None. ''' - return cursor.fetchone() + return self.cursor.fetchone() def sql_commit(self): ''' Actually commit to the database. 
@@ -45,19 +44,19 @@ class Database(Database): if str(error) != 'cannot commit - no transaction is active': raise - def save_dbschema(self, cursor, schema): + def save_dbschema(self, schema): ''' Save the schema definition that the database currently implements ''' s = repr(self.database_schema) - self.sql(cursor, 'insert into schema values (%s)', (s,)) + self.sql('insert into schema values (%s)', (s,)) - def load_dbschema(self, cursor): + def load_dbschema(self): ''' Load the schema definition that the database currently implements ''' - cursor.execute('select schema from schema') - return eval(cursor.fetchone()[0]) + self.cursor.execute('select schema from schema') + return eval(self.cursor.fetchone()[0]) - def save_journal(self, cursor, classname, cols, nodeid, journaldate, + def save_journal(self, classname, cols, nodeid, journaldate, journaltag, action, params): ''' Save the journal entry to the database ''' @@ -71,9 +70,9 @@ class Database(Database): cols, a, a, a, a, a) if __debug__: print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry) - cursor.execute(sql, entry) + self.cursor.execute(sql, entry) - def load_journal(self, cursor, classname, cols, nodeid): + def load_journal(self, classname, cols, nodeid): ''' Load the journal from the database ''' # now get the journal entries @@ -81,9 +80,9 @@ class Database(Database): self.arg) if __debug__: print >>hyperdb.DEBUG, 'getjournal', (self, sql, nodeid) - cursor.execute(sql, (nodeid,)) + self.cursor.execute(sql, (nodeid,)) res = [] - for nodeid, date_stamp, user, action, params in cursor.fetchall(): + for nodeid, date_stamp, user, action, params in self.cursor.fetchall(): params = eval(params) res.append((nodeid, date.Date(date_stamp), user, action, params)) return res diff --git a/roundup/backends/rdbms_common.py b/roundup/backends/rdbms_common.py index e53b965..29f0ca7 100644 --- a/roundup/backends/rdbms_common.py +++ b/roundup/backends/rdbms_common.py @@ -1,4 +1,4 @@ -# $Id: rdbms_common.py,v 1.10 2002-09-23 00:50:32 richard Exp $ +# $Id: rdbms_common.py,v 1.11 2002-09-23 06:48:35 richard Exp $ # standard python modules import sys, os, time, re, errno, weakref, copy @@ -53,17 +53,17 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' raise NotImplemented - def sql(self, cursor, sql, args=None): + def sql(self, sql, args=None): ''' Execute the sql with the optional args. ''' if __debug__: print >>hyperdb.DEBUG, (self, sql, args) if args: - cursor.execute(sql, args) + self.cursor.execute(sql, args) else: - cursor.execute(sql) + self.cursor.execute(sql) - def sql_fetchone(self, cursor): + def sql_fetchone(self): ''' Fetch a single row. If there's nothing to fetch, return None. 
''' raise NotImplemented @@ -73,12 +73,12 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' return re.sub("'", "''", str(value)) - def save_dbschema(self, cursor, schema): + def save_dbschema(self, schema): ''' Save the schema definition that the database currently implements ''' raise NotImplemented - def load_dbschema(self, cursor): + def load_dbschema(self): ''' Load the schema definition that the database currently implements ''' raise NotImplemented @@ -108,9 +108,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # update the database version of the schema if save: - cursor = self.conn.cursor() - self.sql(cursor, 'delete from schema') - self.save_dbschema(cursor, self.database_schema) + self.sql('delete from schema') + self.save_dbschema(self.database_schema) # reindex the db if necessary if self.indexer.should_reindex(): @@ -180,9 +179,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): dbspec_propnames.append(propname) dbspec_props[propname] = prop - # we're going to need one of these - cursor = self.conn.cursor() - # now compare for propname in spec_propnames: prop = spec_props[propname] @@ -195,7 +191,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # add the property if isinstance(prop, Multilink): # all we have to do here is create a new table, easy! - self.create_multilink_table(cursor, spec, propname) + self.create_multilink_table(spec, propname) continue # no ALTER TABLE, so we: @@ -207,14 +203,14 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'select %s,%s from _%s'%(','.join(oldcols), self.arg, cn) if __debug__: print >>hyperdb.DEBUG, 'update_class', (self, sql, None) - cursor.execute(sql, (None,)) - olddata = cursor.fetchall() + self.cursor.execute(sql, (None,)) + olddata = self.cursor.fetchall() # 2. drop the old table - cursor.execute('drop table _%s'%cn) + self.cursor.execute('drop table _%s'%cn) # 3. create the new table - cols, mls = self.create_class_table(cursor, spec) + cols, mls = self.create_class_table(spec) # ensure the new column is last cols.remove('_'+propname) assert oldcols == cols, "Column lists don't match!" @@ -232,7 +228,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # do the insert for row in olddata: - self.sql(cursor, sql, tuple(row)) + self.sql(sql, tuple(row)) else: # modify the property @@ -253,7 +249,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'drop table %s_%s'%(spec.classname, prop) if __debug__: print >>hyperdb.DEBUG, 'update_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) else: # no ALTER TABLE, so we: # 1. pull out the data, excluding the removed column @@ -264,23 +260,23 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): oldcols.remove('_'+propname) cn = spec.classname sql = 'select %s from _%s'%(','.join(oldcols), cn) - cursor.execute(sql, (None,)) + self.cursor.execute(sql, (None,)) olddata = sql.fetchall() # 2. drop the old table - cursor.execute('drop table _%s'%cn) + self.cursor.execute('drop table _%s'%cn) # 3. create the new table - cols, mls = self.create_class_table(self, cursor, spec) + cols, mls = self.create_class_table(self, spec) assert oldcols != cols, "Column lists don't match!" # 4. 
populate with the data from step one qs = ','.join([self.arg for x in cols]) sql = 'insert into _%s values (%s)'%(cn, s) - cursor.execute(sql, olddata) + self.cursor.execute(sql, olddata) return 1 - def create_class_table(self, cursor, spec): + def create_class_table(self, spec): ''' create the class table for the given spec ''' cols, mls = self.determine_columns(spec.properties.items()) @@ -294,11 +290,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'create table _%s (%s)'%(spec.classname, scols) if __debug__: print >>hyperdb.DEBUG, 'create_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) return cols, mls - def create_journal_table(self, cursor, spec): + def create_journal_table(self, spec): ''' create the journal table for a class given the spec and already-determined cols ''' @@ -308,9 +304,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'create table %s__journal (%s)'%(spec.classname, cols) if __debug__: print >>hyperdb.DEBUG, 'create_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) - def create_multilink_table(self, cursor, spec, ml): + def create_multilink_table(self, spec, ml): ''' Create a multilink table for the "ml" property of the class given by the spec ''' @@ -318,25 +314,24 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): spec.classname, ml) if __debug__: print >>hyperdb.DEBUG, 'create_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) def create_class(self, spec): ''' Create a database table according to the given spec. ''' - cursor = self.conn.cursor() - cols, mls = self.create_class_table(cursor, spec) - self.create_journal_table(cursor, spec) + cols, mls = self.create_class_table(spec) + self.create_journal_table(spec) # now create the multilink tables for ml in mls: - self.create_multilink_table(cursor, spec, ml) + self.create_multilink_table(spec, ml) # ID counter sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) vals = (spec.classname, 1) if __debug__: print >>hyperdb.DEBUG, 'create_class', (self, sql, vals) - cursor.execute(sql, vals) + self.cursor.execute(sql, vals) def drop_class(self, spec): ''' Drop the given table from the database. 
@@ -348,23 +343,22 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): for col, prop in spec.properties.items(): if isinstance(prop, Multilink): mls.append(col) - cursor = self.conn.cursor() sql = 'drop table _%s'%spec.classname if __debug__: print >>hyperdb.DEBUG, 'drop_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) sql = 'drop table %s__journal'%spec.classname if __debug__: print >>hyperdb.DEBUG, 'drop_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) for ml in mls: sql = 'drop table %s_%s'%(spec.classname, ml) if __debug__: print >>hyperdb.DEBUG, 'drop_class', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) # # Classes @@ -417,12 +411,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' if __debug__: print >>hyperdb.DEBUG, 'clear', (self,) - cursor = self.conn.cursor() for cn in self.classes.keys(): sql = 'delete from _%s'%cn if __debug__: print >>hyperdb.DEBUG, 'clear', (self, sql) - cursor.execute(sql) + self.cursor.execute(sql) # # Node IDs @@ -431,19 +424,18 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' Generate a new id for the given class ''' # get the next ID - cursor = self.conn.cursor() sql = 'select num from ids where name=%s'%self.arg if __debug__: print >>hyperdb.DEBUG, 'newid', (self, sql, classname) - cursor.execute(sql, (classname, )) - newid = cursor.fetchone()[0] + self.cursor.execute(sql, (classname, )) + newid = self.cursor.fetchone()[0] # update the counter sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) vals = (int(newid)+1, classname) if __debug__: print >>hyperdb.DEBUG, 'newid', (self, sql, vals) - cursor.execute(sql, vals) + self.cursor.execute(sql, vals) # return as string return str(newid) @@ -451,12 +443,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def setid(self, classname, setid): ''' Set the id counter: used during import of database ''' - cursor = self.conn.cursor() sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) vals = (setid, classname) if __debug__: print >>hyperdb.DEBUG, 'setid', (self, sql, vals) - cursor.execute(sql, vals) + self.cursor.execute(sql, vals) # # Nodes @@ -497,11 +488,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): cols = ','.join(cols) + ',id,__retired__' # perform the inserts - cursor = self.conn.cursor() sql = 'insert into _%s (%s) values (%s)'%(classname, cols, s) if __debug__: print >>hyperdb.DEBUG, 'addnode', (self, sql, vals) - cursor.execute(sql, vals) + self.cursor.execute(sql, vals) # insert the multilink rows for col in mls: @@ -509,7 +499,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): for entry in node[col]: sql = 'insert into %s (linkid, nodeid) values (%s,%s)'%(t, self.arg, self.arg) - self.sql(cursor, sql, (entry, nodeid)) + self.sql(sql, (entry, nodeid)) # make sure we do the commit-time extra stuff for this node self.transactions.append((self.doSaveNode, (classname, nodeid, node))) @@ -546,8 +536,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): cols.append('_'+col) cols.sort() - cursor = self.conn.cursor() - # if there's any updates to regular columns, do them if cols: # make sure the ordering is correct for column name -> column value @@ -559,7 +547,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'update _%s set %s where id=%s'%(classname, s, self.arg) if __debug__: print >>hyperdb.DEBUG, 'setnode', (self, sql, sqlvals) - 
cursor.execute(sql, sqlvals) + self.cursor.execute(sql, sqlvals) # now the fun bit, updating the multilinks ;) for col, (add, remove) in multilink_changes.items(): @@ -568,12 +556,12 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'insert into %s (nodeid, linkid) values (%s,%s)'%(tn, self.arg, self.arg) for addid in add: - self.sql(cursor, sql, (nodeid, addid)) + self.sql(sql, (nodeid, addid)) if remove: sql = 'delete from %s where nodeid=%s and linkid=%s'%(tn, self.arg, self.arg) for removeid in remove: - self.sql(cursor, sql, (nodeid, removeid)) + self.sql(sql, (nodeid, removeid)) # make sure we do the commit-time extra stuff for this node self.transactions.append((self.doSaveNode, (classname, nodeid, values))) @@ -599,11 +587,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): scols = ','.join(cols) # perform the basic property fetch - cursor = self.conn.cursor() sql = 'select %s from _%s where id=%s'%(scols, classname, self.arg) - self.sql(cursor, sql, (nodeid,)) + self.sql(sql, (nodeid,)) - values = self.sql_fetchone(cursor) + values = self.sql_fetchone() if values is None: raise IndexError, 'no such %s node %s'%(classname, nodeid) @@ -617,9 +604,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # get the link ids sql = 'select linkid from %s_%s where nodeid=%s'%(classname, col, self.arg) - cursor.execute(sql, (nodeid,)) + self.cursor.execute(sql, (nodeid,)) # extract the first column from the result - node[col] = [x[0] for x in cursor.fetchall()] + node[col] = [x[0] for x in self.cursor.fetchall()] # un-dbificate the node data node = self.unserialise(classname, node) @@ -654,9 +641,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): self.transactions.remove(entry) # now do the SQL - cursor = self.conn.cursor() sql = 'delete from _%s where id=%s'%(classname, self.arg) - self.sql(cursor, sql, (nodeid,)) + self.sql(sql, (nodeid,)) # remove from multilnks cl = self.getclass(classname) @@ -664,11 +650,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): for col in mls: # get the link ids sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg) - cursor.execute(sql, (nodeid,)) + self.cursor.execute(sql, (nodeid,)) # remove journal entries sql = 'delete from %s__journal where nodeid=%s'%(classname, self.arg) - self.sql(cursor, sql, (nodeid,)) + self.sql(sql, (nodeid,)) def serialise(self, classname, node): '''Copy the node contents, converting non-marshallable data into @@ -730,22 +716,20 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def hasnode(self, classname, nodeid): ''' Determine if the database has a given node. ''' - cursor = self.conn.cursor() sql = 'select count(*) from _%s where id=%s'%(classname, self.arg) if __debug__: print >>hyperdb.DEBUG, 'hasnode', (self, sql, nodeid) - cursor.execute(sql, (nodeid,)) - return int(cursor.fetchone()[0]) + self.cursor.execute(sql, (nodeid,)) + return int(self.cursor.fetchone()[0]) def countnodes(self, classname): ''' Count the number of nodes that exist for a particular Class. ''' - cursor = self.conn.cursor() sql = 'select count(*) from _%s'%classname if __debug__: print >>hyperdb.DEBUG, 'countnodes', (self, sql) - cursor.execute(sql) - return cursor.fetchone()[0] + self.cursor.execute(sql) + return self.cursor.fetchone()[0] def getnodeids(self, classname, retired=0): ''' Retrieve all the ids of the nodes for a particular Class. 
@@ -753,15 +737,14 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): Set retired=None to get all nodes. Otherwise it'll get all the retired or non-retired nodes, depending on the flag. ''' - cursor = self.conn.cursor() # flip the sense of the flag if we don't want all of them if retired is not None: retired = not retired sql = 'select id from _%s where __retired__ <> %s'%(classname, self.arg) if __debug__: print >>hyperdb.DEBUG, 'getnodeids', (self, sql, retired) - cursor.execute(sql, (retired,)) - return [x[0] for x in cursor.fetchall()] + self.cursor.execute(sql, (retired,)) + return [x[0] for x in self.cursor.fetchall()] def addjournal(self, classname, nodeid, action, params, creator=None, creation=None): @@ -795,11 +778,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): print >>hyperdb.DEBUG, 'addjournal', (nodeid, journaldate, journaltag, action, params) - cursor = self.conn.cursor() - self.save_journal(cursor, classname, cols, nodeid, journaldate, + self.save_journal(classname, cols, nodeid, journaldate, journaltag, action, params) - def save_journal(self, cursor, classname, cols, nodeid, journaldate, + def save_journal(self, classname, cols, nodeid, journaldate, journaltag, action, params): ''' Save the journal entry to the database ''' @@ -812,11 +794,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): if not self.hasnode(classname, nodeid): raise IndexError, '%s has no node %s'%(classname, nodeid) - cursor = self.conn.cursor() cols = ','.join('nodeid date tag action params'.split()) - return self.load_journal(cursor, classname, cols, nodeid) + return self.load_journal(classname, cols, nodeid) - def load_journal(self, cursor, classname, cols, nodeid): + def load_journal(self, classname, cols, nodeid): ''' Load the journal from the database ''' raise NotImplemented @@ -828,13 +809,12 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): date_stamp = pack_before.serialise() # do the delete - cursor = self.conn.cursor() for classname in self.classes.keys(): sql = "delete from %s__journal where date<%s and "\ "action<>'create'"%(classname, self.arg) if __debug__: print >>hyperdb.DEBUG, 'pack', (self, sql, date_stamp) - cursor.execute(sql, (date_stamp,)) + self.cursor.execute(sql, (date_stamp,)) def sql_commit(self): ''' Actually commit to the database. @@ -1470,23 +1450,21 @@ class Class(hyperdb.Class): if self.db.journaltag is None: raise DatabaseError, 'Database open read-only' - cursor = self.db.conn.cursor() sql = 'update _%s set __retired__=1 where id=%s'%(self.classname, self.db.arg) if __debug__: print >>hyperdb.DEBUG, 'retire', (self, sql, nodeid) - cursor.execute(sql, (nodeid,)) + self.db.cursor.execute(sql, (nodeid,)) def is_retired(self, nodeid): '''Return true if the node is rerired ''' - cursor = self.db.conn.cursor() sql = 'select __retired__ from _%s where id=%s'%(self.classname, self.db.arg) if __debug__: print >>hyperdb.DEBUG, 'is_retired', (self, sql, nodeid) - cursor.execute(sql, (nodeid,)) - return int(cursor.fetchone()[0]) + self.db.cursor.execute(sql, (nodeid,)) + return int(self.db.sql_fetchone()[0]) def destroy(self, nodeid): '''Destroy a node. 
@@ -1584,13 +1562,12 @@ class Class(hyperdb.Class): if not self.key: raise TypeError, 'No key property set for class %s'%self.classname - cursor = self.db.conn.cursor() sql = 'select id,__retired__ from _%s where _%s=%s'%(self.classname, self.key, self.db.arg) - self.db.sql(cursor, sql, (keyvalue,)) + self.db.sql(sql, (keyvalue,)) # see if there was a result that's not retired - l = cursor.fetchall() + l = self.db.cursor.fetchall() if not l or int(l[0][1]): raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, keyvalue, self.classname) @@ -1626,12 +1603,9 @@ class Class(hyperdb.Class): tables.append('select nodeid from %s_%s where linkid in (%s)'%( self.classname, prop, ','.join([a for x in values.keys()]))) sql = '\nintersect\n'.join(tables) - if __debug__: - print >>hyperdb.DEBUG, 'find', (self, sql, allvalues) - cursor = self.db.conn.cursor() - cursor.execute(sql, allvalues) + self.db.sql(sql, allvalues) try: - l = [x[0] for x in cursor.fetchall()] + l = [x[0] for x in self.db.cursor.fetchall()] except gadfly.database.error, message: if message == 'no more results': l = [] @@ -1772,9 +1746,8 @@ class Class(hyperdb.Class): args = tuple(args) if __debug__: print >>hyperdb.DEBUG, 'filter', (self, sql, args) - cursor = self.db.conn.cursor() - cursor.execute(sql, args) - l = cursor.fetchall() + self.db.cursor.execute(sql, args) + l = self.db.cursor.fetchall() # return the IDs (the first column) # XXX The filter(None, l) bit is sqlite-specific... if there's _NO_ diff --git a/roundup/cgi/templating.py b/roundup/cgi/templating.py index 781afb0..72a0c4a 100644 --- a/roundup/cgi/templating.py +++ b/roundup/cgi/templating.py @@ -63,6 +63,17 @@ templates = {} class NoTemplate(Exception): pass +def precompileTemplates(dir): + ''' Go through a directory and precompile all the templates therein + ''' + for filename in os.listdir(dir): + if os.path.isdir(filename): continue + if '.' in filename: + name, extension = filename.split('.') + getTemplate(dir, name, extension) + else: + getTemplate(dir, filename, None) + def getTemplate(dir, name, extension, classname=None, request=None): ''' Interface to get a template, possibly loading a compiled template. diff --git a/roundup/date.py b/roundup/date.py index 1cc6464..e048927 100644 --- a/roundup/date.py +++ b/roundup/date.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: date.py,v 1.28 2002-09-10 12:44:42 richard Exp $ +# $Id: date.py,v 1.29 2002-09-23 06:48:34 richard Exp $ __doc__ = """ Date, time and time interval handling. @@ -220,17 +220,22 @@ class Date: (((?P\d?\d):(?P\d\d))?(:(?P\d\d))?)? # hh:mm:ss (?P.+)? # offset ''', re.VERBOSE), serialised_re=re.compile(''' - (?P\d{4})(?P\d{2})(?P\d{2}) # yyyymmdd - (?P\d{2})(?P\d{2})(?P\d{2}) # HHMMSS - ''', re.VERBOSE)): + (\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2}) + ''')): ''' set the date to the value in spec ''' m = serialised_re.match(spec) + if m: + # we're serialised - easy! + self.year, self.month, self.day, self.hour, self.minute, \ + self.second = map(int, m.groups()[1:7]) + return + + # not serialised data, try usual format + m = date_re.match(spec) if not m: - m = date_re.match(spec) - if not m: - raise ValueError, _('Not a date spec: [[yyyy-]mm-dd].' - '[[h]h:mm[:ss]][offset]') + raise ValueError, _('Not a date spec: [[yyyy-]mm-dd].' 
+ '[[h]h:mm[:ss]][offset]') info = m.groupdict() diff --git a/roundup/scripts/roundup_server.py b/roundup/scripts/roundup_server.py index 8b49efc..72d7d38 100644 --- a/roundup/scripts/roundup_server.py +++ b/roundup/scripts/roundup_server.py @@ -16,7 +16,7 @@ # """ HTTP Server that serves roundup. -$Id: roundup_server.py,v 1.11 2002-09-23 00:50:32 richard Exp $ +$Id: roundup_server.py,v 1.12 2002-09-23 06:48:35 richard Exp $ """ # python version check @@ -59,7 +59,6 @@ ROUNDUP_USER = None ## end configuration # - class RoundupRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): TRACKER_HOMES = TRACKER_HOMES ROUNDUP_USER = ROUNDUP_USER diff --git a/test/test_db.py b/test/test_db.py index 33bc69c..2af0a5c 100644 --- a/test/test_db.py +++ b/test/test_db.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: test_db.py,v 1.53 2002-09-20 19:26:28 gmcm Exp $ +# $Id: test_db.py,v 1.54 2002-09-23 06:48:35 richard Exp $ import unittest, os, shutil, time @@ -413,6 +413,7 @@ class anydbmDBTestCase(MyTestCase): (x, date_stamp2, x, x, x) = entry # see if the change was journalled when it shouldn't have been self.assertEqual(date_stamp, date_stamp2) + time.sleep(1) self.db.issue.enableJournalling() self.db.issue.set('1', title='hello world 2') self.db.commit() @@ -428,9 +429,9 @@ class anydbmDBTestCase(MyTestCase): self.db.commit() # sleep for at least a second, then get a date to pack at - time.sleep(1) + time.sleep(2) pack_before = date.Date('.') - time.sleep(1) + time.sleep(2) # one more entry self.db.issue.set(id, status='3') @@ -718,7 +719,7 @@ def suite(): unittest.makeSuite(anydbmDBTestCase, 'test'), unittest.makeSuite(anydbmReadOnlyDBTestCase, 'test') ] -# return unittest.TestSuite(l) + return unittest.TestSuite(l) try: import gadfly -- 2.30.2
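
The core of the speedup is the move from a throwaway cursor per method to a single cursor owned by the Database object. Below is a minimal stand-alone sketch of that pattern, using the standard-library sqlite3 module rather than Roundup's actual backends; the class name, table convention and methods are illustrative only, not Roundup code:

    import sqlite3

    class SingleCursorDB:
        ''' Keep one cursor per connection and route every statement through
            it, instead of calling conn.cursor() inside each method.
        '''
        def __init__(self, path):
            self.conn = sqlite3.connect(path)
            self.cursor = self.conn.cursor()   # created once, reused everywhere

        def sql(self, sql, args=None):
            ''' Execute sql with optional args on the shared cursor. '''
            if args:
                self.cursor.execute(sql, args)
            else:
                self.cursor.execute(sql)

        def sql_fetchone(self):
            ''' Fetch a single row from the shared cursor, or None. '''
            return self.cursor.fetchone()

        def countnodes(self, classname):
            # mirrors the _<classname> table naming convention used in the patch
            self.sql('select count(*) from _%s' % classname)
            return self.sql_fetchone()[0]

Because the cursor lives on self, helpers such as sql(), sql_fetchone(), save_journal() and load_journal() no longer need a cursor argument threaded through every call, which is exactly the signature change visible throughout the diff.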
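
The date.py change follows the same fast-path idea: serialised dates are fixed-width yyyymmddHHMMSS strings, so Date.set() tries a compact regex first and only falls back to the full free-form parser when it does not match. A simplified sketch of that fast path follows; the function and constant names are mine, not Roundup's:

    import re

    SERIALISED_RE = re.compile(r'^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})$')

    def parse_serialised(spec):
        ''' Decode a serialised yyyymmddHHMMSS date, or return None so the
            caller can fall back to the slower free-form date parser.
        '''
        m = SERIALISED_RE.match(spec)
        if not m:
            return None
        return tuple(map(int, m.groups()))    # (year, month, day, H, M, S)

    # parse_serialised('20020923064835') -> (2002, 9, 23, 6, 48, 35)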
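
The precompileTemplates() helper added to templating.py is not called anywhere in this patch. One plausible use, purely an assumption and not something the patch wires up, is to warm the template cache at server start-up so the first web request does not pay the compilation cost:

    import os
    from roundup.cgi import templating

    def warm_template_cache(tracker_home):
        # 'html' is the template directory inside a tracker home; the helper
        # walks it and compiles each template via getTemplate().
        templating.precompileTemplates(os.path.join(tracker_home, 'html'))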