From: richard Date: Mon, 22 Mar 2004 07:45:40 +0000 (+0000) Subject: Implemented proper datatypes in mysql and postgresql backends (well, X-Git-Url: https://git.tokkee.org/?a=commitdiff_plain;h=f56765afa21807caa7d68958d6bcbc2f5c37fba3;p=roundup.git Implemented proper datatypes in mysql and postgresql backends (well, sqlite too, but that doesn't care). Probably should use BOOLEAN instead of INTEGER for the Boolean props. Need to fix a bizzaro MySQL error (gee, how unusual) Need to finish MySQL migration from "version 1" database schemas. git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@2166 57a73879-2fb5-44c3-a270-3262357dd7e2 --- diff --git a/roundup/backends/back_metakit.py b/roundup/backends/back_metakit.py index a0d8757..6e41516 100755 --- a/roundup/backends/back_metakit.py +++ b/roundup/backends/back_metakit.py @@ -1,4 +1,4 @@ -# $Id: back_metakit.py,v 1.66 2004-03-21 23:39:08 richard Exp $ +# $Id: back_metakit.py,v 1.67 2004-03-22 07:45:39 richard Exp $ '''Metakit backend for Roundup, originally by Gordon McMillan. Known Current Bugs: @@ -39,7 +39,7 @@ This backend has some behaviour specific to metakit: __docformat__ = 'restructuredtext' # Enable this flag to break backwards compatibility (i.e. can't read old # databases) but comply with more roundup features, like adding NULL support. -BACKWARDS_COMPATIBLE = True +BACKWARDS_COMPATIBLE = 1 from roundup import hyperdb, date, password, roundupdb, security import metakit diff --git a/roundup/backends/back_mysql.py b/roundup/backends/back_mysql.py index c1d1680..d5e5edc 100644 --- a/roundup/backends/back_mysql.py +++ b/roundup/backends/back_mysql.py @@ -6,7 +6,31 @@ # disclaimer are retained in their original form. # -'''This module defines a backend implementation for MySQL.''' +'''This module defines a backend implementation for MySQL. 
+ + +How to implement AUTO_INCREMENT: + +mysql> create table foo (num integer auto_increment primary key, name +varchar(255)) AUTO_INCREMENT=1 type=InnoDB; + +ql> insert into foo (name) values ('foo5'); +Query OK, 1 row affected (0.00 sec) + +mysql> SELECT num FROM foo WHERE num IS NULL; ++-----+ +| num | ++-----+ +| 4 | ++-----+ +1 row in set (0.00 sec) + +mysql> SELECT num FROM foo WHERE num IS NULL; +Empty set (0.00 sec) + +NOTE: we don't need an index on the id column if it's PRIMARY KEY + +''' __docformat__ = 'restructuredtext' from roundup.backends.rdbms_common import * @@ -87,6 +111,17 @@ class Database(Database): mysql_backend = 'InnoDB' #mysql_backend = 'BDB' + hyperdb_to_sql_value = { + hyperdb.String : str, + # no fractional seconds for MySQL + hyperdb.Date : lambda x: x.formal(sep=' '), + hyperdb.Link : int, + hyperdb.Interval : lambda x: x.serialise(), + hyperdb.Password : str, + hyperdb.Boolean : int, + hyperdb.Number : lambda x: x, + } + def sql_open_connection(self): db = getattr(self.config, 'MYSQL_DATABASE') try: @@ -116,43 +151,113 @@ class Database(Database): self.init_dbschema() self.sql("CREATE TABLE schema (schema TEXT) TYPE=%s"% self.mysql_backend) - # TODO: use AUTO_INCREMENT for generating ids: - # http://www.mysql.com/doc/en/CREATE_TABLE.html - self.sql("CREATE TABLE ids (name varchar(255), num INT) TYPE=%s"% - self.mysql_backend) - self.sql("CREATE INDEX ids_name_idx ON ids(name)") + self.cursor.execute('''CREATE TABLE ids (name VARCHAR(255), + num INTEGER) TYPE=%s'''%self.mysql_backend) + self.cursor.execute('create index ids_name_idx on ids(name)') self.create_version_2_tables() def create_version_2_tables(self): # OTK store - self.cursor.execute('CREATE TABLE otks (otk_key VARCHAR(255), ' - 'otk_value VARCHAR(255), otk_time FLOAT(20)) ' - 'TYPE=%s'%self.mysql_backend) + self.cursor.execute('''CREATE TABLE otks (otk_key VARCHAR(255), + otk_value VARCHAR(255), otk_time FLOAT(20)) + TYPE=%s'''%self.mysql_backend) self.cursor.execute('CREATE INDEX otks_key_idx ON otks(otk_key)') # Sessions store - self.cursor.execute('CREATE TABLE sessions (session_key VARCHAR(255), ' - 'session_time FLOAT(20), session_value VARCHAR(255)) ' - 'TYPE=%s'%self.mysql_backend) - self.cursor.execute('CREATE INDEX sessions_key_idx ON ' - 'sessions(session_key)') + self.cursor.execute('''CREATE TABLE sessions ( + session_key VARCHAR(255), session_time FLOAT(20), + session_value VARCHAR(255)) TYPE=%s'''%self.mysql_backend) + self.cursor.execute('''CREATE INDEX sessions_key_idx ON + sessions(session_key)''') # full-text indexing store - self.cursor.execute('CREATE TABLE _textids (_class VARCHAR(255), ' - '_itemid VARCHAR(255), _prop VARCHAR(255), _textid INT) ' - 'TYPE=%s'%self.mysql_backend) - self.cursor.execute('CREATE TABLE _words (_word VARCHAR(30), ' - '_textid INT) TYPE=%s'%self.mysql_backend) - self.cursor.execute('CREATE INDEX words_word_ids ON _words(_word)') + self.cursor.execute('''CREATE TABLE __textids (_class VARCHAR(255), + _itemid VARCHAR(255), _prop VARCHAR(255), _textid INT) + TYPE=%s'''%self.mysql_backend) + self.cursor.execute('''CREATE TABLE __words (_word VARCHAR(30), + _textid INT) TYPE=%s'''%self.mysql_backend) + self.cursor.execute('CREATE INDEX words_word_ids ON __words(_word)') sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) - self.cursor.execute(sql, ('_textids', 1)) + self.cursor.execute(sql, ('__textids', 1)) def add_actor_column(self): - # update existing tables to have the new actor column - tables = self.database_schema['tables'] - 
for name in tables.keys(): - self.cursor.execute('ALTER TABLE _%s add __actor ' - 'VARCHAR(255)'%name) + ''' While we're adding the actor column, we need to update the + tables to have the correct datatypes.''' + assert 0, 'FINISH ME!' + + for spec in self.classes.values(): + new_has = spec.properties.has_key + new_spec = spec.schema() + new_spec[1].sort() + old_spec[1].sort() + if not force and new_spec == old_spec: + # no changes + return 0 + + if __debug__: + print >>hyperdb.DEBUG, 'update_class FIRING' + + # detect multilinks that have been removed, and drop their table + old_has = {} + for name,prop in old_spec[1]: + old_has[name] = 1 + if new_has(name) or not isinstance(prop, hyperdb.Multilink): + continue + # it's a multilink, and it's been removed - drop the old + # table. First drop indexes. + self.drop_multilink_table_indexes(spec.classname, ml) + sql = 'drop table %s_%s'%(spec.classname, prop) + if __debug__: + print >>hyperdb.DEBUG, 'update_class', (self, sql) + self.cursor.execute(sql) + old_has = old_has.has_key + + # now figure how we populate the new table + if adding_actor: + fetch = ['_activity', '_creation', '_creator'] + else: + fetch = ['_actor', '_activity', '_creation', '_creator'] + properties = spec.getprops() + for propname,x in new_spec[1]: + prop = properties[propname] + if isinstance(prop, hyperdb.Multilink): + if force or not old_has(propname): + # we need to create the new table + self.create_multilink_table(spec, propname) + elif old_has(propname): + # we copy this col over from the old table + fetch.append('_'+propname) + + # select the data out of the old table + fetch.append('id') + fetch.append('__retired__') + fetchcols = ','.join(fetch) + cn = spec.classname + sql = 'select %s from _%s'%(fetchcols, cn) + if __debug__: + print >>hyperdb.DEBUG, 'update_class', (self, sql) + self.cursor.execute(sql) + olddata = self.cursor.fetchall() + + # TODO: update all the other index dropping code + self.drop_class_table_indexes(cn, old_spec[0]) + + # drop the old table + self.cursor.execute('drop table _%s'%cn) + + # create the new table + self.create_class_table(spec) + + # do the insert of the old data - the new columns will have + # NULL values + args = ','.join([self.arg for x in fetch]) + sql = 'insert into _%s (%s) values (%s)'%(cn, fetchcols, args) + if __debug__: + print >>hyperdb.DEBUG, 'update_class', (self, sql, olddata[0]) + for entry in olddata: + self.cursor.execute(sql, tuple(entry)) + + return 1 def __repr__(self): return ''%id(self) @@ -174,40 +279,21 @@ class Database(Database): s = repr(self.database_schema) self.sql('INSERT INTO schema VALUES (%s)', (s,)) - def save_journal(self, classname, cols, nodeid, journaldate, - journaltag, action, params): - params = repr(params) - entry = (nodeid, journaldate, journaltag, action, params) - - a = self.arg - sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(classname, - cols, a, a, a, a, a) - if __debug__: - print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry) - self.cursor.execute(sql, entry) - - def load_journal(self, classname, cols, nodeid): - sql = 'select %s from %s__journal where nodeid=%s'%(cols, classname, - self.arg) - if __debug__: - print >>hyperdb.DEBUG, 'getjournal', (self, sql, nodeid) - self.cursor.execute(sql, (nodeid,)) - res = [] - for nodeid, date_stamp, user, action, params in self.cursor.fetchall(): - params = eval(params) - res.append((nodeid, date.Date(date_stamp), user, action, params)) - return res - def create_class_table(self, spec): cols, mls = 
self.determine_columns(spec.properties.items()) - cols.append('id') - cols.append('__retired__') - scols = ',' . join(['`%s` VARCHAR(255)'%x for x in cols]) - sql = 'CREATE TABLE `_%s` (%s) TYPE=%s'%(spec.classname, scols, + + # add on our special columns + cols.append(('id', 'INTEGER PRIMARY KEY')) + cols.append(('__retired__', 'INTEGER DEFAULT 0')) + + # create the base table + scols = ','.join(['%s %s'%x for x in cols]) + sql = 'create table _%s (%s) type=%s'%(spec.classname, scols, self.mysql_backend) if __debug__: - print >>hyperdb.DEBUG, 'create_class', (self, sql) + print >>hyperdb.DEBUG, 'create_class', (self, sql) self.cursor.execute(sql) + self.create_class_table_indexes(spec) return cols, mls @@ -227,12 +313,15 @@ class Database(Database): self.cursor.execute(index_sql) def create_journal_table(self, spec): - cols = ',' . join(['`%s` VARCHAR(255)'%x - for x in 'nodeid date tag action params' . split()]) - sql = 'CREATE TABLE `%s__journal` (%s) TYPE=%s'%(spec.classname, - cols, self.mysql_backend) + # journal table + cols = ','.join(['%s varchar'%x + for x in 'nodeid date tag action params'.split()]) + sql = '''create table %s__journal ( + nodeid integer, date timestamp, tag varchar(255), + action varchar(255), params varchar(255)) type=%s'''%( + spec.classname, self.mysql_backend) if __debug__: - print >>hyperdb.DEBUG, 'create_class', (self, sql) + print >>hyperdb.DEBUG, 'create_journal_table', (self, sql) self.cursor.execute(sql) self.create_journal_table_indexes(spec) @@ -278,6 +367,46 @@ class Database(Database): print >>hyperdb.DEBUG, 'drop_index', (self, sql) self.cursor.execute(sql) + # old-skool id generation + def newid(self, classname): + ''' Generate a new id for the given class + ''' + # get the next ID + sql = 'select num from ids where name=%s'%self.arg + if __debug__: + print >>hyperdb.DEBUG, 'newid', (self, sql, classname) + self.cursor.execute(sql, (classname, )) + newid = int(self.cursor.fetchone()[0]) + + # update the counter + sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) + vals = (int(newid)+1, classname) + if __debug__: + print >>hyperdb.DEBUG, 'newid', (self, sql, vals) + self.cursor.execute(sql, vals) + + # return as string + return str(newid) + + def setid(self, classname, setid): + ''' Set the id counter: used during import of database + + We add one to make it behave like the seqeunces in postgres. + ''' + sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) + vals = (int(setid)+1, classname) + if __debug__: + print >>hyperdb.DEBUG, 'setid', (self, sql, vals) + self.cursor.execute(sql, vals) + + def create_class(self, spec): + rdbms_common.Database.create_class(self, spec) + sql = 'insert into ids (name, num) values (%s, %s)' + vals = (spec.classname, 1) + if __debug__: + print >>hyperdb.DEBUG, 'create_class', (self, sql, vals) + self.cursor.execute(sql, vals) + class MysqlClass: # we're overriding this method for ONE missing bit of functionality. 
# look for "I can't believe it's not a toy RDBMS" below @@ -486,7 +615,8 @@ class MysqlClass: l = self.db.cursor.fetchall() # return the IDs (the first column) - return [row[0] for row in l] + # XXX numeric ids + return [str(row[0]) for row in l] class Class(MysqlClass, rdbms_common.Class): pass diff --git a/roundup/backends/back_postgresql.py b/roundup/backends/back_postgresql.py index a8537df..4882492 100644 --- a/roundup/backends/back_postgresql.py +++ b/roundup/backends/back_postgresql.py @@ -108,30 +108,31 @@ class Database(rdbms_common.Database): self.rollback() self.init_dbschema() self.sql("CREATE TABLE schema (schema TEXT)") - self.sql("CREATE TABLE ids (name VARCHAR(255), num INT4)") - self.sql("CREATE INDEX ids_name_idx ON ids(name)") + self.sql("CREATE TABLE dual (dummy integer)") + self.sql("insert into dual values (1)") self.create_version_2_tables() def create_version_2_tables(self): # OTK store - self.cursor.execute('CREATE TABLE otks (otk_key VARCHAR(255), ' - 'otk_value VARCHAR(255), otk_time FLOAT(20))') + self.cursor.execute('''CREATE TABLE otks (otk_key VARCHAR(255), + otk_value VARCHAR(255), otk_time FLOAT(20))''') self.cursor.execute('CREATE INDEX otks_key_idx ON otks(otk_key)') # Sessions store - self.cursor.execute('CREATE TABLE sessions (session_key VARCHAR(255), ' - 'session_time FLOAT(20), session_value VARCHAR(255))') - self.cursor.execute('CREATE INDEX sessions_key_idx ON ' - 'sessions(session_key)') + self.cursor.execute('''CREATE TABLE sessions ( + session_key VARCHAR(255), session_time FLOAT(20), + session_value VARCHAR(255))''') + self.cursor.execute('''CREATE INDEX sessions_key_idx ON + sessions(session_key)''') # full-text indexing store - self.cursor.execute('CREATE TABLE _textids (_class VARCHAR(255), ' - '_itemid VARCHAR(255), _prop VARCHAR(255), _textid INT4) ') - self.cursor.execute('CREATE TABLE _words (_word VARCHAR(30), ' - '_textid INT4)') - self.cursor.execute('CREATE INDEX words_word_ids ON _words(_word)') - sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) - self.cursor.execute(sql, ('_textids', 1)) + self.cursor.execute('CREATE SEQUENCE ___textids_ids') + self.cursor.execute('''CREATE TABLE __textids ( + _textid integer primary key, _class VARCHAR(255), + _itemid VARCHAR(255), _prop VARCHAR(255))''') + self.cursor.execute('''CREATE TABLE __words (_word VARCHAR(30), + _textid integer)''') + self.cursor.execute('CREATE INDEX words_word_idx ON __words(_word)') def add_actor_column(self): # update existing tables to have the new actor column @@ -155,16 +156,23 @@ class Database(rdbms_common.Database): return self.cursor.fetchone()[0] def create_class_table(self, spec): - cols, mls = self.determine_columns(spec.properties.items()) - cols.append('id') - cols.append('__retired__') - scols = ',' . join(['"%s" VARCHAR(255)'%x for x in cols]) - sql = 'CREATE TABLE "_%s" (%s)' % (spec.classname, scols) + sql = 'CREATE SEQUENCE _%s_ids'%spec.classname if __debug__: print >>hyperdb.DEBUG, 'create_class_table', (self, sql) self.cursor.execute(sql) - self.create_class_table_indexes(spec) - return cols, mls + + return rdbms_common.Database.create_class_table(self, spec) + + def drop_class_table(self, cn): + sql = 'drop table _%s'%cn + if __debug__: + print >>hyperdb.DEBUG, 'drop_class', (self, sql) + self.cursor.execute(sql) + + sql = 'drop sequence _%s_ids'%cn + if __debug__: + print >>hyperdb.DEBUG, 'drop_class', (self, sql) + self.cursor.execute(sql) def create_journal_table(self, spec): cols = ',' . 
join(['"%s" VARCHAR(255)'%x @@ -185,6 +193,20 @@ class Database(rdbms_common.Database): self.cursor.execute(sql) self.create_multilink_table_indexes(spec, ml) + def newid(self, classname): + sql = "select nextval('_%s_ids') from dual"%classname + if __debug__: + print >>hyperdb.DEBUG, 'setid', (self, sql) + self.cursor.execute(sql) + return self.cursor.fetchone()[0] + + def setid(self, classname, setid): + sql = "select setval('_%s_ids', %s) from dual"%(classname, int(setid)) + if __debug__: + print >>hyperdb.DEBUG, 'setid', (self, sql) + self.cursor.execute(sql) + + class Class(rdbms_common.Class): pass class IssueClass(rdbms_common.IssueClass): diff --git a/roundup/backends/back_sqlite.py b/roundup/backends/back_sqlite.py index 360fd70..32f3d7c 100644 --- a/roundup/backends/back_sqlite.py +++ b/roundup/backends/back_sqlite.py @@ -1,13 +1,17 @@ -# $Id: back_sqlite.py,v 1.19 2004-03-21 23:45:44 richard Exp $ +# $Id: back_sqlite.py,v 1.20 2004-03-22 07:45:39 richard Exp $ '''Implements a backend for SQLite. See https://pysqlite.sourceforge.net/ for pysqlite info + + +NOTE: we use the rdbms_common table creation methods which define datatypes +for the columns, but sqlite IGNORES these specifications. ''' __docformat__ = 'restructuredtext' import os, base64, marshal -from roundup import hyperdb +from roundup import hyperdb, date, password from roundup.backends import rdbms_common import sqlite @@ -15,6 +19,26 @@ class Database(rdbms_common.Database): # char to use for positional arguments arg = '%s' + hyperdb_to_sql_value = { + hyperdb.String : str, + hyperdb.Date : lambda x: x.serialise(), + hyperdb.Link : int, + hyperdb.Interval : lambda x: x.serialise(), + hyperdb.Password : str, + hyperdb.Boolean : int, + hyperdb.Number : lambda x: x, + } + sql_to_hyperdb_value = { + hyperdb.String : str, + hyperdb.Date : lambda x: date.Date(str(x)), +# hyperdb.Link : int, # XXX numeric ids + hyperdb.Link : str, + hyperdb.Interval : date.Interval, + hyperdb.Password : lambda x: password.Password(encrypted=x), + hyperdb.Boolean : int, + hyperdb.Number : rdbms_common._num_cvt, + } + def sql_open_connection(self): db = os.path.join(self.config.DATABASE, 'db') conn = sqlite.connect(db=db) @@ -48,13 +72,13 @@ class Database(rdbms_common.Database): 'sessions(session_key)') # full-text indexing store - self.cursor.execute('CREATE TABLE _textids (_class varchar, ' - '_itemid varchar, _prop varchar, _textid integer) ') - self.cursor.execute('CREATE TABLE _words (_word varchar, ' + self.cursor.execute('CREATE TABLE __textids (_class varchar, ' + '_itemid varchar, _prop varchar, _textid integer primary key) ') + self.cursor.execute('CREATE TABLE __words (_word varchar, ' '_textid integer)') - self.cursor.execute('CREATE INDEX words_word_ids ON _words(_word)') + self.cursor.execute('CREATE INDEX words_word_ids ON __words(_word)') sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) - self.cursor.execute(sql, ('_textids', 1)) + self.cursor.execute(sql, ('__textids', 1)) def add_actor_column(self): # update existing tables to have the new actor column @@ -188,6 +212,46 @@ class Database(rdbms_common.Database): return 1 return 0 + # old-skool id generation + def newid(self, classname): + ''' Generate a new id for the given class + ''' + # get the next ID + sql = 'select num from ids where name=%s'%self.arg + if __debug__: + print >>hyperdb.DEBUG, 'newid', (self, sql, classname) + self.cursor.execute(sql, (classname, )) + newid = int(self.cursor.fetchone()[0]) + + # update the counter + sql = 'update 
ids set num=%s where name=%s'%(self.arg, self.arg) + vals = (int(newid)+1, classname) + if __debug__: + print >>hyperdb.DEBUG, 'newid', (self, sql, vals) + self.cursor.execute(sql, vals) + + # return as string + return str(newid) + + def setid(self, classname, setid): + ''' Set the id counter: used during import of database + + We add one to make it behave like the seqeunces in postgres. + ''' + sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) + vals = (int(setid)+1, classname) + if __debug__: + print >>hyperdb.DEBUG, 'setid', (self, sql, vals) + self.cursor.execute(sql, vals) + + def create_class(self, spec): + rdbms_common.Database.create_class(self, spec) + sql = 'insert into ids (name, num) values (%s, %s)' + vals = (spec.classname, 1) + if __debug__: + print >>hyperdb.DEBUG, 'create_class', (self, sql, vals) + self.cursor.execute(sql, vals) + class sqliteClass: def filter(self, search_matches, filterspec, sort=(None,None), group=(None,None)): diff --git a/roundup/backends/indexer_rdbms.py b/roundup/backends/indexer_rdbms.py index 3528083..82437b1 100644 --- a/roundup/backends/indexer_rdbms.py +++ b/roundup/backends/indexer_rdbms.py @@ -34,19 +34,21 @@ class Indexer(Indexer): # first, find the id of the (classname, itemid, property) a = self.db.arg - sql = 'select _textid from _textids where _class=%s and '\ + sql = 'select _textid from __textids where _class=%s and '\ '_itemid=%s and _prop=%s'%(a, a, a) self.db.cursor.execute(sql, identifier) r = self.db.cursor.fetchone() if not r: - id = self.db.newid('_textids') - sql = 'insert into _textids (_textid, _class, _itemid, _prop)'\ + id = self.db.newid('__textids') + sql = 'insert into __textids (_textid, _class, _itemid, _prop)'\ ' values (%s, %s, %s, %s)'%(a, a, a, a) self.db.cursor.execute(sql, (id, ) + identifier) + self.db.cursor.execute('select max(_textid) from __textids') + id = self.db.cursor.fetchone()[0] else: id = int(r[0]) # clear out any existing indexed values - sql = 'delete from _words where _textid=%s'%a + sql = 'delete from __words where _textid=%s'%a self.db.cursor.execute(sql, (id, )) # ok, find all the words in the text @@ -60,11 +62,11 @@ class Indexer(Indexer): # for each word, add an entry in the db for word in words: # don't dupe - sql = 'select * from _words where _word=%s and _textid=%s'%(a, a) + sql = 'select * from __words where _word=%s and _textid=%s'%(a, a) self.db.cursor.execute(sql, (word, id)) if self.db.cursor.fetchall(): continue - sql = 'insert into _words (_word, _textid) values (%s, %s)'%(a, a) + sql = 'insert into __words (_word, _textid) values (%s, %s)'%(a, a) self.db.cursor.execute(sql, (word, id)) def find(self, wordlist): @@ -75,13 +77,13 @@ class Indexer(Indexer): l = [word.upper() for word in wordlist if 26 > len(word) > 2] a = ','.join([self.db.arg] * len(l)) - sql = 'select distinct(_textid) from _words where _word in (%s)'%a + sql = 'select distinct(_textid) from __words where _word in (%s)'%a self.db.cursor.execute(sql, tuple(l)) r = self.db.cursor.fetchall() if not r: return {} a = ','.join([self.db.arg] * len(r)) - sql = 'select _class, _itemid, _prop from _textids '\ + sql = 'select _class, _itemid, _prop from __textids '\ 'where _textid in (%s)'%a self.db.cursor.execute(sql, tuple([int(id) for (id,) in r])) # self.search_index has the results as {some id: identifier} ... 
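The indexer changes are mostly a rename: the full-text data now lives in __textids and __words (double-underscored, presumably so they cannot collide with a per-class _<classname> table), and the new text id is read back after the insert. The following is a minimal standalone sketch of the same add/find flow. It assumes plain Python with the sqlite3 module, uses ? placeholders instead of Roundup's self.arg, and lets an INTEGER PRIMARY KEY hand out the text ids; it illustrates the schema, it is not Roundup's Indexer class.

    import sqlite3

    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()

    # same shape as the tables created in create_version_2_tables()
    cur.execute('''CREATE TABLE __textids (_textid INTEGER PRIMARY KEY,
        _class VARCHAR(255), _itemid VARCHAR(255), _prop VARCHAR(255))''')
    cur.execute('CREATE TABLE __words (_word VARCHAR(30), _textid INTEGER)')
    cur.execute('CREATE INDEX words_word_idx ON __words(_word)')

    def add_text(classname, itemid, prop, text):
        # register the (class, item, property) triple and index its words
        cur.execute('insert into __textids (_class, _itemid, _prop) '
            'values (?, ?, ?)', (classname, itemid, prop))
        textid = cur.lastrowid
        for word in set(w.upper() for w in text.split() if 26 > len(w) > 2):
            cur.execute('insert into __words (_word, _textid) values (?, ?)',
                (word, textid))

    def find(wordlist):
        # resolve matching text ids back to (class, itemid, property) triples
        words = [w.upper() for w in wordlist if 26 > len(w) > 2]
        if not words:
            return []
        a = ','.join('?' * len(words))
        cur.execute('select distinct(_textid) from __words '
            'where _word in (%s)' % a, words)
        ids = [row[0] for row in cur.fetchall()]
        if not ids:
            return []
        a = ','.join('?' * len(ids))
        cur.execute('select _class, _itemid, _prop from __textids '
            'where _textid in (%s)' % a, ids)
        return cur.fetchall()

    add_text('issue', '1', 'title', 'Testing the full text indexer')
    print(find(['indexer']))    # [('issue', '1', 'title')]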
diff --git a/roundup/backends/rdbms_common.py b/roundup/backends/rdbms_common.py index e7c4f42..56d0e1c 100644 --- a/roundup/backends/rdbms_common.py +++ b/roundup/backends/rdbms_common.py @@ -1,4 +1,4 @@ -# $Id: rdbms_common.py,v 1.83 2004-03-21 23:39:08 richard Exp $ +# $Id: rdbms_common.py,v 1.84 2004-03-22 07:45:39 richard Exp $ ''' Relational database (SQL) backend common code. Basics: @@ -46,6 +46,13 @@ from roundup.date import Range # number of rows to keep in memory ROW_CACHE_SIZE = 100 +def _num_cvt(num): + num = str(num) + try: + return int(num) + except: + return float(num) + class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' Wrapper around an SQL database that presents a hyperdb interface. @@ -212,22 +219,43 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): klass.index(nodeid) self.indexer.save_index() + + hyperdb_to_sql_datatypes = { + hyperdb.String : 'VARCHAR(255)', + hyperdb.Date : 'TIMESTAMP', + hyperdb.Link : 'INTEGER', + hyperdb.Interval : 'VARCHAR(255)', + hyperdb.Password : 'VARCHAR(255)', + hyperdb.Boolean : 'INTEGER', + hyperdb.Number : 'REAL', + } def determine_columns(self, properties): ''' Figure the column names and multilink properties from the spec "properties" is a list of (name, prop) where prop may be an instance of a hyperdb "type" _or_ a string repr of that type. ''' - cols = ['_actor', '_activity', '_creator', '_creation'] + cols = [ + ('_actor', 'INTEGER'), + ('_activity', 'DATE'), + ('_creator', 'INTEGER'), + ('_creation', 'DATE') + ] mls = [] # add the multilinks separately for col, prop in properties: if isinstance(prop, Multilink): mls.append(col) - elif isinstance(prop, type('')) and prop.find('Multilink') != -1: - mls.append(col) - else: - cols.append('_'+col) + continue + + if isinstance(prop, type('')): + raise ValueError, "string property spec!" 
+ #and prop.find('Multilink') != -1: + #mls.append(col) + + datatype = self.hyperdb_to_sql_datatypes[prop.__class__] + cols.append(('_'+col, datatype)) + cols.sort() return cols, mls @@ -315,11 +343,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): cols, mls = self.determine_columns(spec.properties.items()) # add on our special columns - cols.append('id') - cols.append('__retired__') + cols.append(('id', 'INTEGER PRIMARY KEY')) + cols.append(('__retired__', 'INTEGER DEFAULT 0')) # create the base table - scols = ','.join(['%s varchar'%x for x in cols]) + scols = ','.join(['%s %s'%x for x in cols]) sql = 'create table _%s (%s)'%(spec.classname, scols) if __debug__: print >>hyperdb.DEBUG, 'create_class', (self, sql) @@ -332,13 +360,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def create_class_table_indexes(self, spec): ''' create the class table for the given spec ''' - # create id index - index_sql1 = 'create index _%s_id_idx on _%s(id)'%( - spec.classname, spec.classname) - if __debug__: - print >>hyperdb.DEBUG, 'create_index', (self, index_sql1) - self.cursor.execute(index_sql1) - # create __retired__ index index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%( spec.classname, spec.classname) @@ -376,14 +397,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def create_class_table_key_index(self, cn, key): ''' create the class table for the given spec ''' + sql = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key, cn, key) if __debug__: - print >>hyperdb.DEBUG, 'update_class setting keyprop %r'% \ - key - index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key, - cn, key) - if __debug__: - print >>hyperdb.DEBUG, 'create_index', (self, index_sql3) - self.cursor.execute(index_sql3) + print >>hyperdb.DEBUG, 'create_class_tab_key_index', (self, sql) + self.cursor.execute(sql) def drop_class_table_key_index(self, cn, key): table_name = '_%s'%cn @@ -392,7 +409,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): return sql = 'drop index '+index_name if __debug__: - print >>hyperdb.DEBUG, 'drop_index', (self, sql) + print >>hyperdb.DEBUG, 'drop_class_tab_key_index', (self, sql) self.cursor.execute(sql) def create_journal_table(self, spec): @@ -402,9 +419,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # journal table cols = ','.join(['%s varchar'%x for x in 'nodeid date tag action params'.split()]) - sql = 'create table %s__journal (%s)'%(spec.classname, cols) + sql = '''create table %s__journal ( + nodeid integer, date timestamp, tag varchar(255), + action varchar(255), params varchar(25))'''%spec.classname if __debug__: - print >>hyperdb.DEBUG, 'create_class', (self, sql) + print >>hyperdb.DEBUG, 'create_journal_table', (self, sql) self.cursor.execute(sql) self.create_journal_table_indexes(spec) @@ -476,13 +495,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): for ml in mls: self.create_multilink_table(spec, ml) - # ID counter - sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg) - vals = (spec.classname, 1) - if __debug__: - print >>hyperdb.DEBUG, 'create_class', (self, sql, vals) - self.cursor.execute(sql, vals) - def drop_class(self, cn, spec): ''' Drop the given table from the database. 
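This hunk is the heart of the "proper datatypes" change: determine_columns() now returns (column name, SQL datatype) pairs driven by the hyperdb_to_sql_datatypes map, and create_class_table() joins them into a typed CREATE TABLE with id as an INTEGER PRIMARY KEY and __retired__ defaulting to 0. A rough standalone sketch of that wiring follows; the stub property classes and the create_class_table_sql() helper are invented for the example, and the DDL shown is the generic rdbms_common form (MySQL appends TYPE=InnoDB, postgres adds a per-class id sequence).

    # stand-ins for the real hyperdb property classes, for illustration only
    class String: pass
    class Date: pass
    class Link: pass
    class Interval: pass
    class Password: pass
    class Boolean: pass
    class Number: pass
    class Multilink: pass

    hyperdb_to_sql_datatypes = {
        String:   'VARCHAR(255)',
        Date:     'TIMESTAMP',
        Link:     'INTEGER',
        Interval: 'VARCHAR(255)',
        Password: 'VARCHAR(255)',
        Boolean:  'INTEGER',
        Number:   'REAL',
    }

    def determine_columns(properties):
        # the calculated columns come first, already carrying their datatype
        cols = [('_actor', 'INTEGER'), ('_activity', 'DATE'),
                ('_creator', 'INTEGER'), ('_creation', 'DATE')]
        mls = []
        for name, prop in properties:
            if isinstance(prop, Multilink):
                mls.append(name)    # multilinks get their own two-column table
                continue
            cols.append(('_' + name, hyperdb_to_sql_datatypes[prop.__class__]))
        cols.sort()
        return cols, mls

    def create_class_table_sql(classname, properties):
        cols, mls = determine_columns(properties)
        # the special columns are now typed too
        cols.append(('id', 'INTEGER PRIMARY KEY'))
        cols.append(('__retired__', 'INTEGER DEFAULT 0'))
        scols = ','.join(['%s %s' % col for col in cols])
        return 'create table _%s (%s)' % (classname, scols)

    print(create_class_table_sql('issue',
        [('title', String()), ('status', Link()), ('nosy', Multilink())]))

For an issue class with a String title, a Link status and a Multilink nosy this prints

    create table _issue (_activity DATE,_actor INTEGER,_creation DATE,_creator INTEGER,_status INTEGER,_title VARCHAR(255),id INTEGER PRIMARY KEY,__retired__ INTEGER DEFAULT 0)

with the nosy multilink left for create_multilink_table(), as before.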
@@ -497,10 +509,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # drop class table and indexes self.drop_class_table_indexes(cn, spec[0]) - sql = 'drop table _%s'%cn - if __debug__: - print >>hyperdb.DEBUG, 'drop_class', (self, sql) - self.cursor.execute(sql) + + self.drop_class_table(cn) # drop journal table and indexes self.drop_journal_table_indexes(cn) @@ -517,6 +527,12 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): print >>hyperdb.DEBUG, 'drop_class', (self, sql) self.cursor.execute(sql) + def drop_class_table(self, cn): + sql = 'drop table _%s'%cn + if __debug__: + print >>hyperdb.DEBUG, 'drop_class', (self, sql) + self.cursor.execute(sql) + # # Classes # @@ -580,41 +596,19 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): print >>hyperdb.DEBUG, 'clear', (self, sql) self.cursor.execute(sql) - # - # Node IDs - # - def newid(self, classname): - ''' Generate a new id for the given class - ''' - # get the next ID - sql = 'select num from ids where name=%s'%self.arg - if __debug__: - print >>hyperdb.DEBUG, 'newid', (self, sql, classname) - self.cursor.execute(sql, (classname, )) - newid = int(self.cursor.fetchone()[0]) - - # update the counter - sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) - vals = (int(newid)+1, classname) - if __debug__: - print >>hyperdb.DEBUG, 'newid', (self, sql, vals) - self.cursor.execute(sql, vals) - - # return as string - return str(newid) - - def setid(self, classname, setid): - ''' Set the id counter: used during import of database - ''' - sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg) - vals = (setid, classname) - if __debug__: - print >>hyperdb.DEBUG, 'setid', (self, sql, vals) - self.cursor.execute(sql, vals) - # # Nodes # + + hyperdb_to_sql_value = { + hyperdb.String : str, + hyperdb.Date : lambda x: x.formal(sep=' ', sec='%f'), + hyperdb.Link : int, + hyperdb.Interval : lambda x: x.serialise(), + hyperdb.Password : str, + hyperdb.Boolean : int, + hyperdb.Number : lambda x: x, + } def addnode(self, classname, nodeid, node): ''' Add the specified node to its class's db. 
''' @@ -626,20 +620,24 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): cols, mls = self.determine_columns(cl.properties.items()) # we'll be supplied these props if we're doing an import - if not node.has_key('creator'): + values = node.copy() + if not values.has_key('creator'): # add in the "calculated" properties (dupe so we don't affect # calling code's node assumptions) - node = node.copy() - node['creation'] = node['activity'] = date.Date() - node['actor'] = node['creator'] = self.getuid() + values['creation'] = values['activity'] = date.Date() + values['actor'] = values['creator'] = self.getuid() + + cl = self.classes[classname] + props = cl.getprops(protected=1) + del props['id'] # default the non-multilink columns - for col, prop in cl.properties.items(): - if not node.has_key(col): + for col, prop in props.items(): + if not values.has_key(col): if isinstance(prop, Multilink): - node[col] = [] + values[col] = [] else: - node[col] = None + values[col] = None # clear this node out of the cache if it's in there key = (classname, nodeid) @@ -647,13 +645,20 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): del self.cache[key] self.cache_lru.remove(key) - # make the node data safe for the DB - node = self.serialise(classname, node) + # figure the values to insert + vals = [] + for col,dt in cols: + prop = props[col[1:]] + value = values[col[1:]] + if value: + value = self.hyperdb_to_sql_value[prop.__class__](value) + vals.append(value) + vals.append(nodeid) + vals = tuple(vals) # make sure the ordering is correct for column name -> column value - vals = tuple([node[col[1:]] for col in cols]) + (nodeid, 0) - s = ','.join([self.arg for x in cols]) + ',%s,%s'%(self.arg, self.arg) - cols = ','.join(cols) + ',id,__retired__' + s = ','.join([self.arg for x in cols]) + ',%s'%self.arg + cols = ','.join([col for col,dt in cols]) + ',id' # perform the inserts sql = 'insert into _%s (%s) values (%s)'%(classname, cols, s) @@ -689,34 +694,42 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): values['activity'] = date.Date() values['actor'] = self.getuid() - # make db-friendly - values = self.serialise(classname, values) - cl = self.classes[classname] + props = cl.getprops() + cols = [] mls = [] # add the multilinks separately - props = cl.getprops() for col in values.keys(): prop = props[col] if isinstance(prop, Multilink): mls.append(col) else: - cols.append('_'+col) + cols.append(col) cols.sort() + # figure the values to insert + vals = [] + for col in cols: + prop = props[col] + value = values[col] + if value is not None: + value = self.hyperdb_to_sql_value[prop.__class__](value) + vals.append(value) + vals.append(int(nodeid)) + vals = tuple(vals) + # if there's any updates to regular columns, do them if cols: # make sure the ordering is correct for column name -> column value - sqlvals = tuple([values[col[1:]] for col in cols]) + (nodeid,) - s = ','.join(['%s=%s'%(x, self.arg) for x in cols]) + s = ','.join(['_%s=%s'%(x, self.arg) for x in cols]) cols = ','.join(cols) # perform the update sql = 'update _%s set %s where id=%s'%(classname, s, self.arg) if __debug__: - print >>hyperdb.DEBUG, 'setnode', (self, sql, sqlvals) - self.cursor.execute(sql, sqlvals) + print >>hyperdb.DEBUG, 'setnode', (self, sql, vals) + self.cursor.execute(sql, vals) # now the fun bit, updating the multilinks ;) for col, (add, remove) in multilink_changes.items(): @@ -725,16 +738,28 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 
'insert into %s (nodeid, linkid) values (%s,%s)'%(tn, self.arg, self.arg) for addid in add: - self.sql(sql, (nodeid, addid)) + # XXX numeric ids + self.sql(sql, (int(nodeid), int(addid))) if remove: sql = 'delete from %s where nodeid=%s and linkid=%s'%(tn, self.arg, self.arg) for removeid in remove: - self.sql(sql, (nodeid, removeid)) + # XXX numeric ids + self.sql(sql, (int(nodeid), int(removeid))) # make sure we do the commit-time extra stuff for this node self.transactions.append((self.doSaveNode, (classname, nodeid, values))) + sql_to_hyperdb_value = { + hyperdb.String : str, + hyperdb.Date : date.Date, +# hyperdb.Link : int, # XXX numeric ids + hyperdb.Link : str, + hyperdb.Interval : date.Interval, + hyperdb.Password : lambda x: password.Password(encrypted=x), + hyperdb.Boolean : int, + hyperdb.Number : _num_cvt, + } def getnode(self, classname, nodeid): ''' Get a node from the database. ''' @@ -753,7 +778,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # figure the columns we're fetching cl = self.classes[classname] cols, mls = self.determine_columns(cl.properties.items()) - scols = ','.join(cols) + scols = ','.join([col for col,dt in cols]) # perform the basic property fetch sql = 'select %s from _%s where id=%s'%(scols, classname, self.arg) @@ -765,8 +790,14 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # make up the node node = {} + props = cl.getprops(protected=1) for col in range(len(cols)): - node[cols[col][1:]] = values[col] + name = cols[col][0][1:] + value = values[col] + if value is not None: + value = self.sql_to_hyperdb_value[props[name].__class__](value) + node[name] = value + # now the multilinks for col in mls: @@ -775,10 +806,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): self.arg) self.cursor.execute(sql, (nodeid,)) # extract the first column from the result - node[col] = [x[0] for x in self.cursor.fetchall()] - - # un-dbificate the node data - node = self.unserialise(classname, node) + # XXX numeric ids + node[col] = [str(x[0]) for x in self.cursor.fetchall()] # save off in the cache key = (classname, nodeid) @@ -826,71 +855,6 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): sql = 'delete from %s__journal where nodeid=%s'%(classname, self.arg) self.sql(sql, (nodeid,)) - def serialise(self, classname, node): - '''Copy the node contents, converting non-marshallable data into - marshallable data. 
- ''' - if __debug__: - print >>hyperdb.DEBUG, 'serialise', classname, node - properties = self.getclass(classname).getprops() - d = {} - for k, v in node.items(): - # if the property doesn't exist, or is the "retired" flag then - # it won't be in the properties dict - if not properties.has_key(k): - d[k] = v - continue - - # get the property spec - prop = properties[k] - - if isinstance(prop, Password) and v is not None: - d[k] = str(v) - elif isinstance(prop, Date) and v is not None: - d[k] = v.serialise() - elif isinstance(prop, Interval) and v is not None: - d[k] = v.serialise() - else: - d[k] = v - return d - - def unserialise(self, classname, node): - '''Decode the marshalled node data - ''' - if __debug__: - print >>hyperdb.DEBUG, 'unserialise', classname, node - properties = self.getclass(classname).getprops() - d = {} - for k, v in node.items(): - # if the property doesn't exist, or is the "retired" flag then - # it won't be in the properties dict - if not properties.has_key(k): - d[k] = v - continue - - # get the property spec - prop = properties[k] - - if isinstance(prop, Date) and v is not None: - d[k] = date.Date(v) - elif isinstance(prop, Interval) and v is not None: - d[k] = date.Interval(v) - elif isinstance(prop, Password) and v is not None: - p = password.Password() - p.unpack(v) - d[k] = p - elif isinstance(prop, Boolean) and v is not None: - d[k] = int(v) - elif isinstance(prop, Number) and v is not None: - # try int first, then assume it's a float - try: - d[k] = int(v) - except ValueError: - d[k] = float(v) - else: - d[k] = v - return d - def hasnode(self, classname, nodeid): ''' Determine if the database has a given node. ''' @@ -930,9 +894,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): else: journaltag = self.getuid() if creation: - journaldate = creation.serialise() + journaldate = creation else: - journaldate = date.Date().serialise() + journaldate = date.Date() # create the journal entry cols = ','.join('nodeid date tag action params'.split()) @@ -960,12 +924,13 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): ''' # make the params db-friendly params = repr(params) - entry = (nodeid, journaldate, journaltag, action, params) + dc = self.hyperdb_to_sql_value[hyperdb.Date] + entry = (nodeid, dc(journaldate), journaltag, action, params) # do the insert a = self.arg - sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(classname, - cols, a, a, a, a, a) + sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%( + classname, cols, a, a, a, a, a) if __debug__: print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry) self.cursor.execute(sql, entry) @@ -980,9 +945,11 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): print >>hyperdb.DEBUG, 'load_journal', (self, sql, nodeid) self.cursor.execute(sql, (nodeid,)) res = [] + dc = self.sql_to_hyperdb_value[hyperdb.Date] for nodeid, date_stamp, user, action, params in self.cursor.fetchall(): params = eval(params) - res.append((nodeid, date.Date(date_stamp), user, action, params)) + # XXX numeric ids + res.append((str(nodeid), dc(date_stamp), user, action, params)) return res def pack(self, pack_before): @@ -1278,7 +1245,8 @@ class Class(hyperdb.Class): if self.do_journal: self.db.addjournal(self.classname, newid, 'create', {}) - return newid + # XXX numeric ids + return str(newid) def export_list(self, propnames, nodeid): ''' Export a node - generate a list of CSV-able data in the order @@ -1844,7 +1812,8 @@ class Class(hyperdb.Class): keyvalue, 
self.classname) # return the id - return row[0] + # XXX numeric ids + return str(row[0]) def find(self, **propspec): '''Get the ids of nodes in this class which link to the given nodes. @@ -1923,9 +1892,11 @@ class Class(hyperdb.Class): else: o = o[0] t = ', '.join(tables) - sql = 'select distinct(id) from %s where __retired__ <> %s and %s'%(t, a, o) + sql = 'select distinct(id) from %s where __retired__ <> %s and %s'%( + t, a, o) self.db.sql(sql, allvalues) - l = [x[0] for x in self.db.sql_fetchall()] + # XXX numeric ids + l = [str(x[0]) for x in self.db.sql_fetchall()] if __debug__: print >>hyperdb.DEBUG, 'find ... ', l return l @@ -1953,7 +1924,8 @@ class Class(hyperdb.Class): s, self.db.arg) args.append(0) self.db.sql(sql, tuple(args)) - l = [x[0] for x in self.db.sql_fetchall()] + # XXX numeric ids + l = [str(x[0]) for x in self.db.sql_fetchall()] if __debug__: print >>hyperdb.DEBUG, 'find ... ', l return l @@ -1983,7 +1955,8 @@ class Class(hyperdb.Class): if __debug__: print >>hyperdb.DEBUG, 'getnodeids', (self, sql, retired) self.db.cursor.execute(sql, args) - ids = [x[0] for x in self.db.cursor.fetchall()] + # XXX numeric ids + ids = [str(x[0]) for x in self.db.cursor.fetchall()] return ids def filter(self, search_matches, filterspec, sort=(None,None), @@ -2077,20 +2050,21 @@ class Class(hyperdb.Class): where.append('_%s=%s'%(k, a)) args.append(v) elif isinstance(propclass, Date): + dc = self.db.hyperdb_to_sql_value[hyperdb.Date] if isinstance(v, type([])): s = ','.join([a for x in v]) where.append('_%s in (%s)'%(k, s)) - args = args + [date.Date(x).serialise() for x in v] + args = args + [dc(date.Date(v)) for x in v] else: try: # Try to filter on range of dates date_rng = Range(v, date.Date, offset=timezone) - if (date_rng.from_value): + if date_rng.from_value: where.append('_%s >= %s'%(k, a)) - args.append(date_rng.from_value.serialise()) - if (date_rng.to_value): + args.append(dc(date_rng.from_value)) + if date_rng.to_value: where.append('_%s <= %s'%(k, a)) - args.append(date_rng.to_value.serialise()) + args.append(dc(date_rng.to_value)) except ValueError: # If range creation fails - ignore that search parameter pass @@ -2103,10 +2077,10 @@ class Class(hyperdb.Class): try: # Try to filter on range of intervals date_rng = Range(v, date.Interval) - if (date_rng.from_value): + if date_rng.from_value: where.append('_%s >= %s'%(k, a)) args.append(date_rng.from_value.serialise()) - if (date_rng.to_value): + if date_rng.to_value: where.append('_%s <= %s'%(k, a)) args.append(date_rng.to_value.serialise()) except ValueError: @@ -2188,7 +2162,8 @@ class Class(hyperdb.Class): l = self.db.sql_fetchall() # return the IDs (the first column) - return [row[0] for row in l] + # XXX numeric ids + return [str(row[0]) for row in l] def count(self): '''Get the number of nodes in this class. diff --git a/roundup/date.py b/roundup/date.py index 8f60a72..6037711 100644 --- a/roundup/date.py +++ b/roundup/date.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: date.py,v 1.60 2004-02-11 23:55:08 richard Exp $ +# $Id: date.py,v 1.61 2004-03-22 07:45:39 richard Exp $ """Date, time and time interval handling. 
""" @@ -103,11 +103,19 @@ class Date: """ if type(spec) == type(''): self.set(spec, offset=offset, add_granularity=add_granularity) - else: + return + elif hasattr(spec, 'tuple'): + spec = spec.tuple() + try: y,m,d,H,M,S,x,x,x = spec + frac = S - int(S) ts = calendar.timegm((y,m,d,H+offset,M,S,0,0,0)) self.year, self.month, self.day, self.hour, self.minute, \ self.second, x, x, x = time.gmtime(ts) + # we lost the fractional part + self.second = self.second + frac + except: + raise ValueError, 'Unknown spec %r'%spec usagespec='[yyyy]-[mm]-[dd].[H]H:MM[:SS][offset]' def set(self, spec, offset=0, date_re=re.compile(r''' @@ -125,8 +133,9 @@ class Date: m = serialised_re.match(spec) if m is not None: # we're serialised - easy! - self.year, self.month, self.day, self.hour, self.minute, \ - self.second = map(int, m.groups()[:6]) + g = m.groups() + (self.year, self.month, self.day, self.hour, self.minute, + self.second) = map(int, g[:6]) return # not serialised data, try usual format @@ -140,7 +149,11 @@ class Date: _add_granularity(info, 'SMHdmyab') # get the current date as our default - y,m,d,H,M,S,x,x,x = time.gmtime(time.time()) + ts = time.time() + frac = ts - int(ts) + y,m,d,H,M,S,x,x,x = time.gmtime(ts) + # gmtime loses the fractional seconds + S = S + frac if info['y'] is not None or info['a'] is not None: if info['y'] is not None: @@ -167,9 +180,12 @@ class Date: S = S - 1 # now handle the adjustment of hour + frac = S - int(S) ts = calendar.timegm((y,m,d,H,M,S,0,0,0)) self.year, self.month, self.day, self.hour, self.minute, \ self.second, x, x, x = time.gmtime(ts) + # we lost the fractional part along the way + self.second = self.second + frac if info.get('o', None): try: @@ -292,8 +308,12 @@ class Date: def __str__(self): """Return this date as a string in the yyyy-mm-dd.hh:mm:ss format.""" - return '%4d-%02d-%02d.%02d:%02d:%02d'%(self.year, self.month, self.day, - self.hour, self.minute, self.second) + return self.formal() + + def formal(self, sep='.', sec='%02d'): + f = '%%4d-%%02d-%%02d%s%%02d:%%02d:%s'%(sep, sec) + return f%(self.year, self.month, self.day, self.hour, self.minute, + self.second) def pretty(self, format='%d %B %Y'): ''' print up the date date using a pretty format... @@ -327,8 +347,11 @@ class Date: def timestamp(self): ''' return a UNIX timestamp for this date ''' - return calendar.timegm((self.year, self.month, self.day, self.hour, + frac = self.second - int(self.second) + ts = calendar.timegm((self.year, self.month, self.day, self.hour, self.minute, self.second, 0, 0, 0)) + # we lose the fractional part + return ts + frac class Interval: ''' diff --git a/roundup/password.py b/roundup/password.py index a267e42..14dbcda 100644 --- a/roundup/password.py +++ b/roundup/password.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: password.py,v 1.11 2004-02-11 23:55:08 richard Exp $ +# $Id: password.py,v 1.12 2004-03-22 07:45:39 richard Exp $ """Password handling (encoding, decoding). 
""" @@ -82,13 +82,15 @@ class Password: default_scheme = 'SHA' # new encryptions use this scheme pwre = re.compile(r'{(\w+)}(.+)') - def __init__(self, plaintext=None, scheme=None): + def __init__(self, plaintext=None, scheme=None, encrypted=None): '''Call setPassword if plaintext is not None.''' if scheme is None: scheme = self.default_scheme if plaintext is not None: self.password = encodePassword(plaintext, self.default_scheme) self.scheme = self.default_scheme + elif encrypted is not None: + self.unpack(encrypted) else: self.password = None self.scheme = self.default_scheme diff --git a/test/db_test_base.py b/test/db_test_base.py index 193e145..3acc05b 100644 --- a/test/db_test_base.py +++ b/test/db_test_base.py @@ -15,7 +15,7 @@ # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. # -# $Id: db_test_base.py,v 1.18 2004-03-19 04:47:59 richard Exp $ +# $Id: db_test_base.py,v 1.19 2004-03-22 07:45:40 richard Exp $ import unittest, os, shutil, errno, imp, sys, time, pprint @@ -110,14 +110,20 @@ class DBTest(MyTestCase): i.set(id1, title='asfasd') self.assertNotEqual(i.get(id1, 'creator'), i.get(id1, 'actor')) - # - # basic operations - # + # ID number controls def testIDGeneration(self): id1 = self.db.issue.create(title="spam", status='1') id2 = self.db.issue.create(title="eggs", status='2') self.assertNotEqual(id1, id2) + def testIDSetting(self): + # XXX numeric ids + self.db.setid('issue', 10) + id2 = self.db.issue.create(title="eggs", status='2') + self.assertEqual('11', id2) + # + # basic operations + # def testEmptySet(self): id1 = self.db.issue.create(title="spam", status='1') self.db.issue.set(id1) @@ -587,24 +593,18 @@ class DBTest(MyTestCase): # test disabling journalling # ... get the last entry - time.sleep(1) - entry = self.db.getjournal('issue', '1')[-1] - (x, date_stamp, x, x, x) = entry + jlen = len(self.db.getjournal('user', '1')) self.db.issue.disableJournalling() self.db.issue.set('1', title='hello world') self.db.commit() - entry = self.db.getjournal('issue', '1')[-1] - (x, date_stamp2, x, x, x) = entry # see if the change was journalled when it shouldn't have been - self.assertEqual(date_stamp, date_stamp2) - time.sleep(1) + self.assertEqual(jlen, len(self.db.getjournal('user', '1'))) + jlen = len(self.db.getjournal('issue', '1')) self.db.issue.enableJournalling() self.db.issue.set('1', title='hello world 2') self.db.commit() - entry = self.db.getjournal('issue', '1')[-1] - (x, date_stamp2, x, x, x) = entry # see if the change was journalled - self.assertNotEqual(date_stamp, date_stamp2) + self.assertNotEqual(jlen, len(self.db.getjournal('issue', '1'))) def testJournalPreCommit(self): id = self.db.user.create(username="mary") diff --git a/test/test_mailgw.py b/test/test_mailgw.py index dcdcf01..371b755 100644 --- a/test/test_mailgw.py +++ b/test/test_mailgw.py @@ -8,7 +8,7 @@ # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # -# $Id: test_mailgw.py,v 1.65 2004-03-19 04:47:59 richard Exp $ +# $Id: test_mailgw.py,v 1.66 2004-03-22 07:45:40 richard Exp $ import unittest, tempfile, os, shutil, errno, imp, sys, difflib, rfc822 @@ -254,6 +254,7 @@ messages: 1 nosy: Chef, mary, richard status: unread title: Testing... 
+ _______________________________________________________________________ Roundup issue tracker @@ -303,6 +304,7 @@ This is a second followup ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -351,6 +353,7 @@ This is a followup assignedto: -> mary nosy: +john, mary status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -393,6 +396,7 @@ This is a followup assignedto: -> mary nosy: +john, mary status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -436,6 +440,7 @@ This is a followup ---------- nosy: +john status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -480,6 +485,7 @@ This is a followup ---------- nosy: +john status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -524,6 +530,7 @@ This is a followup ---------- nosy: +john status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -566,6 +573,7 @@ This is a followup ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -609,6 +617,7 @@ This is a followup ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -725,6 +734,7 @@ A message with encoding (encoded oe =C3=B6) ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -777,6 +787,7 @@ A message with first part encoded (encoded oe =C3=B6) ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker @@ -850,6 +861,7 @@ This is a followup ---------- status: unread -> chatting + _______________________________________________________________________ Roundup issue tracker
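The trailing date.py and password.py changes are what make the typed columns workable: Date grows a formal(sep, sec) formatter (rdbms_common stores dates as formal(sep=' ', sec='%f') to keep fractional seconds, the MySQL backend drops the fraction because its columns cannot hold it, sqlite keeps using serialise()), __str__() now simply returns self.formal(), the fractional part is carried around the whole-second calendar.timegm()/time.gmtime() round trip, and Password(encrypted=...) lets the backends rebuild a password object straight from its stored string. Below is a small sketch of the fractional-second trick and the formatting, independent of Roundup's Date class; the helper names are made up for the example.

    import calendar, time

    def normalise(y, m, d, H, M, S, offset=0):
        # keep the fraction aside: timegm()/gmtime() only deal in whole seconds
        frac = S - int(S)
        ts = calendar.timegm((y, m, d, H + offset, M, int(S), 0, 0, 0))
        y, m, d, H, M, S = time.gmtime(ts)[:6]
        return y, m, d, H, M, S + frac      # re-attach the lost fraction

    def formal(parts, sep='.', sec='%02d'):
        # same idea as Date.formal(): rdbms_common passes sep=' ' and
        # sec='%f' so the fraction survives, MySQL leaves the fraction out
        f = '%%4d-%%02d-%%02d%s%%02d:%%02d:%s' % (sep, sec)
        return f % parts

    parts = normalise(2004, 3, 22, 7, 45, 39.25)
    print(formal(parts))                      # 2004-03-22.07:45:39
    print(formal(parts, sep=' ', sec='%f'))   # 2004-03-22 07:45:39.250000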