From: richard
Date: Fri, 13 Sep 2002 08:20:13 +0000 (+0000)
Subject: Fixed:
X-Git-Url: https://git.tokkee.org/?a=commitdiff_plain;h=7d7c7ecd969f4bceb1f02727daeee9a6d5abcdb6;p=roundup.git

Fixed:
 . handling of None for Date/Interval/Password values in export/import
 . handling of journal values in export/import

Also played with metakit backend some, fixing some of the unit tests
breakages. Hrm.

git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1166 57a73879-2fb5-44c3-a270-3262357dd7e2
---

diff --git a/CHANGES.txt b/CHANGES.txt
index 343e51e..db67d44 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,6 +1,10 @@
 This file contains the changes to the Roundup system over time. The entries
 are given with the most recent entry first.
 
+2002-09-?? 0.5.0 ????
+ . handling of None for Date/Interval/Password values in export/import
+ . handling of journal values in export/import
+
 2002-09-13 0.5.0 beta2
  . all backends now have a .close() method, and it's used everywhere
  . fixed bug in detectors __init__
diff --git a/roundup/backends/back_anydbm.py b/roundup/backends/back_anydbm.py
index 1721044..f36213d 100644
--- a/roundup/backends/back_anydbm.py
+++ b/roundup/backends/back_anydbm.py
@@ -15,7 +15,7 @@
 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
-#$Id: back_anydbm.py,v 1.77 2002-09-12 07:23:23 richard Exp $
+#$Id: back_anydbm.py,v 1.78 2002-09-13 08:20:07 richard Exp $
 '''
 This module defines a backend that saves the hyperdatabase in a database
 chosen by anydbm. It is guaranteed to always be available in python
@@ -154,7 +154,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
         if os.path.exists(path):
             db_type = whichdb.whichdb(path)
             if not db_type:
-                raise hyperdb.DatabaseError, "Couldn't identify database type"
+                raise DatabaseError, "Couldn't identify database type"
         elif os.path.exists(path+'.db'):
             # if the path ends in '.db', it's a dbm database, whether
             # anydbm says it's dbhash or not!
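The first commit-message item above is about letting optional properties survive the CSV export/import round trip. What follows is a minimal standalone sketch of that round trip, not part of the patch: Date, export_value and import_value are simplified stand-ins for roundup.date.Date and for the Class.export_list/Class.import_list code patched in the hunks below.

# Illustrative sketch only -- not part of the patch.  "Date" stands in for
# roundup.date.Date; export_value/import_value mirror the shape of
# Class.export_list/Class.import_list without the hyperdb machinery.
class Date:
    def __init__(self, spec):
        self.spec = tuple(spec)
    def get_tuple(self):
        return self.spec

def export_value(value):
    # export: marshal typed values into something repr()-able, but let None
    # pass straight through so the CSV column carries the literal 'None'
    if value is None:
        pass
    elif isinstance(value, Date):
        value = value.get_tuple()
    return repr(value)

def import_value(text):
    # import: eval() reverses the repr(); a None means "property not set",
    # so the importer skips it instead of storing it
    value = eval(text)
    if value is None:
        return None
    return Date(value)

assert import_value(export_value(None)) is None
assert import_value(export_value(Date((2002, 9, 13)))).get_tuple() == (2002, 9, 13)

Writing None out as the literal 'None' and skipping it again on import is what keeps optional Date/Interval/Password columns from being fed into date.Date(None) or Password.unpack(None).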
@@ -182,7 +182,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
         try:
             dbm = __import__(db_type)
         except ImportError:
-            raise hyperdb.DatabaseError, \
+            raise DatabaseError, \
                 "Couldn't open database - the required module '%s'"\
                 " is not available"%db_type
         if __debug__:
@@ -447,7 +447,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
 
     #
     # Journal
     #
-    def addjournal(self, classname, nodeid, action, params):
+    def addjournal(self, classname, nodeid, action, params, creator=None,
+        creation=None):
         ''' Journal the Action
             'action' may be:
@@ -457,9 +458,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
         '''
         if __debug__:
             print >>hyperdb.DEBUG, 'addjournal', (self, classname, nodeid,
-                action, params)
+                action, params, creator, creation)
         self.transactions.append((self.doSaveJournal, (classname, nodeid,
-            action, params)))
+            action, params, creator, creation)))
 
     def getjournal(self, classname, nodeid):
         ''' get the journal for id
@@ -603,28 +604,22 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
             self.databases[db_name] = self.opendb(db_name, 'c')
             return self.databases[db_name]
 
-    def doSaveJournal(self, classname, nodeid, action, params):
-        # handle supply of the special journalling parameters (usually
-        # supplied on importing an existing database)
+    def doSaveJournal(self, classname, nodeid, action, params, creator,
+            creation):
+        # serialise the parameters now if necessary
         if isinstance(params, type({})):
-            if params.has_key('creator'):
-                journaltag = self.user.get(params['creator'], 'username')
-                del params['creator']
-            else:
-                journaltag = self.journaltag
-            if params.has_key('created'):
-                journaldate = params['created'].serialise()
-                del params['created']
-            else:
-                journaldate = date.Date().serialise()
-            if params.has_key('activity'):
-                del params['activity']
-
-            # serialise the parameters now
             if action in ('set', 'create'):
                 params = self.serialise(classname, params)
+
+        # handle supply of the special journalling parameters (usually
+        # supplied on importing an existing database)
+        if creator:
+            journaltag = creator
         else:
             journaltag = self.journaltag
+        if creation:
+            journaldate = creation.serialise()
+        else:
             journaldate = date.Date().serialise()
 
         # create the journal entry
@@ -889,7 +884,9 @@ class Class(hyperdb.Class):
             proptype = properties[prop]
             value = self.get(nodeid, prop)
             # "marshal" data where needed
-            if isinstance(proptype, hyperdb.Date):
+            if value is None:
+                pass
+            elif isinstance(proptype, hyperdb.Date):
                 value = value.get_tuple()
             elif isinstance(proptype, hyperdb.Interval):
                 value = value.get_tuple()
@@ -924,6 +921,9 @@ class Class(hyperdb.Class):
             if propname == 'id':
                 newid = value
                 continue
+            elif value is None:
+                # don't set Nones
+                continue
             elif isinstance(prop, hyperdb.Date):
                 value = date.Date(value)
             elif isinstance(prop, hyperdb.Interval):
@@ -932,12 +932,26 @@ class Class(hyperdb.Class):
                 pwd = password.Password()
                 pwd.unpack(value)
                 value = pwd
-            if value is not None:
-                d[propname] = value
+            d[propname] = value
+
+        # extract the extraneous journalling gumpf and nuke it
+        if d.has_key('creator'):
+            creator = d['creator']
+            del d['creator']
+        else:
+            creator = None
+        if d.has_key('creation'):
+            creation = d['creation']
+            del d['creation']
+        else:
+            creation = None
+        if d.has_key('activity'):
+            del d['activity']
 
-        # add
+        # add the node and journal
         self.db.addnode(self.classname, newid, d)
-        self.db.addjournal(self.classname, newid, 'create', d)
+        self.db.addjournal(self.classname, newid, 'create', d, creator,
+            creation)
         return newid
 
     def get(self, nodeid, propname, default=_marker, cache=1):
@@ -1825,7 +1839,7 @@ class FileClass(Class):
 
         # extract the "content" property from the proplist
         i = propnames.index('content')
-        content = proplist[i]
+        content = eval(proplist[i])
         del propnames[i]
         del proplist[i]
 
diff --git a/roundup/backends/back_bsddb.py b/roundup/backends/back_bsddb.py
index 9ffd906..6f7aa9d 100644
--- a/roundup/backends/back_bsddb.py
+++ b/roundup/backends/back_bsddb.py
@@ -15,7 +15,7 @@
 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
-#$Id: back_bsddb.py,v 1.23 2002-09-10 00:11:50 richard Exp $
+#$Id: back_bsddb.py,v 1.24 2002-09-13 08:20:11 richard Exp $
 '''
 This module defines a backend that saves the hyperdatabase in BSDDB.
 '''
@@ -107,24 +107,3 @@ class Database(Database):
             self.databases[db_name] = db
             return db
 
-    def doSaveJournal(self, classname, nodeid, action, params):
-        # serialise first
-        if action in ('set', 'create'):
-            params = self.serialise(classname, params)
-
-        entry = (nodeid, date.Date().get_tuple(), self.journaltag, action,
-            params)
-
-        if __debug__:
-            print >>hyperdb.DEBUG, 'doSaveJournal', entry
-
-        db = self.getCachedJournalDB(classname)
-
-        if db.has_key(nodeid):
-            s = db[nodeid]
-            l = marshal.loads(s)
-            l.append(entry)
-        else:
-            l = [entry]
-
-        db[nodeid] = marshal.dumps(l)
diff --git a/roundup/backends/back_bsddb3.py b/roundup/backends/back_bsddb3.py
index 0a70195..be4d80d 100644
--- a/roundup/backends/back_bsddb3.py
+++ b/roundup/backends/back_bsddb3.py
@@ -15,7 +15,7 @@
 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
-#$Id: back_bsddb3.py,v 1.16 2002-09-10 00:11:50 richard Exp $
+#$Id: back_bsddb3.py,v 1.17 2002-09-13 08:20:12 richard Exp $
 
 import bsddb3, os, marshal
 from roundup import hyperdb, date
@@ -103,24 +103,3 @@ class Database(Database):
             self.databases[db_name] = db
             return db
 
-    def doSaveJournal(self, classname, nodeid, action, params):
-        # serialise first
-        if action in ('set', 'create'):
-            params = self.serialise(classname, params)
-
-        entry = (nodeid, date.Date().get_tuple(), self.journaltag, action,
-            params)
-
-        if __debug__:
-            print >>hyperdb.DEBUG, 'doSaveJournal', entry
-
-        db = self.getCachedJournalDB(classname)
-
-        if db.has_key(nodeid):
-            s = db[nodeid]
-            l = marshal.loads(s)
-            l.append(entry)
-        else:
-            l = [entry]
-
-        db[nodeid] = marshal.dumps(l)
diff --git a/roundup/backends/back_gadfly.py b/roundup/backends/back_gadfly.py
index a4add91..b6579bf 100644
--- a/roundup/backends/back_gadfly.py
+++ b/roundup/backends/back_gadfly.py
@@ -1,4 +1,4 @@
-# $Id: back_gadfly.py,v 1.18 2002-09-12 07:23:23 richard Exp $
+# $Id: back_gadfly.py,v 1.19 2002-09-13 08:20:13 richard Exp $
 __doc__ = '''
 About Gadfly
 ============
@@ -659,7 +659,8 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
         cursor.execute(sql, (retired,))
         return [x[0] for x in cursor.fetchall()]
 
-    def addjournal(self, classname, nodeid, action, params):
+    def addjournal(self, classname, nodeid, action, params, creator=None,
+        creation=None):
         ''' Journal the Action
             'action' may be:
@@ -667,25 +668,20 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
             'link' or 'unlink' -- 'params' is (classname, nodeid, propname)
             'retire' -- 'params' is None
         '''
+        # serialise the parameters now if necessary
         if isinstance(params, type({})):
-            if params.has_key('creator'):
-                journaltag = self.user.get(params['creator'], 'username')
-                del params['creator']
-            else:
-                journaltag = self.journaltag
-            if params.has_key('created'):
-                journaldate = params['created'].serialise()
-                del params['created']
-            else:
-                journaldate = date.Date().serialise()
-            if params.has_key('activity'):
-                del params['activity']
-
-            # serialise the parameters now
             if action in ('set', 'create'):
                 params = self.serialise(classname, params)
+
+        # handle supply of the special journalling parameters (usually
+        # supplied on importing an existing database)
+        if creator:
+            journaltag = creator
         else:
             journaltag = self.journaltag
+        if creation:
+            journaldate = creation.serialise()
+        else:
             journaldate = date.Date().serialise()
 
         # create the journal entry
@@ -693,7 +689,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database):
         entry = (nodeid, journaldate, journaltag, action, params)
 
         if __debug__:
-            print >>hyperdb.DEBUG, 'doSaveJournal', entry
+            print >>hyperdb.DEBUG, 'addjournal', entry
 
         # do the insert
         cursor = self.conn.cursor()
@@ -999,6 +995,82 @@ class Class(hyperdb.Class):
 
         return newid
 
+    def export_list(self, propnames, nodeid):
+        ''' Export a node - generate a list of CSV-able data in the order
+            specified by propnames for the given node.
+        '''
+        properties = self.getprops()
+        l = []
+        for prop in propnames:
+            proptype = properties[prop]
+            value = self.get(nodeid, prop)
+            # "marshal" data where needed
+            if value is None:
+                pass
+            elif isinstance(proptype, hyperdb.Date):
+                value = value.get_tuple()
+            elif isinstance(proptype, hyperdb.Interval):
+                value = value.get_tuple()
+            elif isinstance(proptype, hyperdb.Password):
+                value = str(value)
+            l.append(repr(value))
+        return l
+
+    def import_list(self, propnames, proplist):
+        ''' Import a node - all information including "id" is present and
+            should not be sanity checked. Triggers are not triggered. The
+            journal should be initialised using the "creator" and "created"
+            information.
+
+            Return the nodeid of the node imported.
+        '''
+        if self.db.journaltag is None:
+            raise DatabaseError, 'Database open read-only'
+        properties = self.getprops()
+
+        # make the new node's property map
+        d = {}
+        for i in range(len(propnames)):
+            # Use eval to reverse the repr() used to output the CSV
+            value = eval(proplist[i])
+
+            # Figure the property for this column
+            propname = propnames[i]
+            prop = properties[propname]
+
+            # "unmarshal" where necessary
+            if propname == 'id':
+                newid = value
+                continue
+            elif value is None:
+                # don't set Nones
+                continue
+            elif isinstance(prop, hyperdb.Date):
+                value = date.Date(value)
+            elif isinstance(prop, hyperdb.Interval):
+                value = date.Interval(value)
+            elif isinstance(prop, hyperdb.Password):
+                pwd = password.Password()
+                pwd.unpack(value)
+                value = pwd
+            d[propname] = value
+
+        # extract the extraneous journalling gumpf and nuke it
+        if d.has_key('creator'):
+            creator = d['creator']
+            del d['creator']
+        if d.has_key('creation'):
+            creation = d['creation']
+            del d['creation']
+        if d.has_key('activity'):
+            del d['activity']
+
+        # add the node and journal
+        self.db.addnode(self.classname, newid, d)
+        self.db.addjournal(self.classname, newid, 'create', d, creator,
+            creation)
+        return newid
+
     _marker = []
     def get(self, nodeid, propname, default=_marker, cache=1):
         '''Get the value of a property on an existing node of this class.
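The export_list/import_list block just above also pulls the imported "creator" and "creation" values out of the property map and hands them straight to addjournal(), which is the second commit-message item. Below is a rough standalone sketch of that fallback logic, not the Roundup API: make_journal_entry, ImportedDate, db_journaltag and now are illustrative stand-ins for addjournal()/doSaveJournal(), roundup.date.Date, self.journaltag and date.Date().serialise().

# Rough standalone sketch, not the Roundup API: mirrors how the reworked
# addjournal()/doSaveJournal() fall back to the current user and time when
# an import supplies no creator/creation.  All names here are stand-ins.
class ImportedDate:
    def __init__(self, serialised):
        self.serialised = serialised
    def serialise(self):
        return self.serialised

def make_journal_entry(nodeid, action, params, creator=None, creation=None,
        db_journaltag='admin', now=lambda: '20020913082013'):
    if creator:
        journaltag = creator                 # imported author wins
    else:
        journaltag = db_journaltag           # normal (non-import) path
    if creation:
        journaldate = creation.serialise()   # imported timestamp wins
    else:
        journaldate = now()                  # normal (non-import) path
    return (nodeid, journaldate, journaltag, action, params)

# an imported entry keeps its original author and timestamp
entry = make_journal_entry('1', 'create', {'title': 'spam'},
    creator='richard', creation=ImportedDate('20020101000000'))
assert entry[1:3] == ('20020101000000', 'richard')

The point of the explicit arguments is that an imported journal entry keeps its original author and timestamp, while ordinary runtime calls leave both arguments at None and fall back to the current user and date.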
@@ -1676,7 +1748,7 @@ class FileClass(Class):
 
         # extract the "content" property from the proplist
         i = propnames.index('content')
-        content = proplist[i]
+        content = eval(proplist[i])
         del propnames[i]
         del proplist[i]
 
diff --git a/roundup/backends/back_metakit.py b/roundup/backends/back_metakit.py
index 1c562ab..c7592a4 100755
--- a/roundup/backends/back_metakit.py
+++ b/roundup/backends/back_metakit.py
@@ -121,9 +121,13 @@ class _Database(hyperdb.Database):
         for cl in self.classes.values():
             cl.db = None
         self._db = None
-        locking.release_lock(self.lockfile)
-        del _dbs[self.config.DATABASE]
-        self.lockfile.close()
+        if self.lockfile is not None:
+            locking.release_lock(self.lockfile)
+        if _dbs.has_key(self.config.DATABASE):
+            del _dbs[self.config.DATABASE]
+        if self.lockfile is not None:
+            self.lockfile.close()
+            self.lockfile = None
         self.classes = {}
         self.indexer = None
 
@@ -296,7 +300,7 @@ class Class:
             raise IndexError, "%s has no node %s" % (self.classname, nodeid)
         oldnode = self.uncommitted.setdefault(id, {})
         changes = {}
-        
+
         for key, value in propvalues.items():
             # this will raise the KeyError if the property isn't valid
             # ... we don't use getprops() here because we only care about
@@ -394,7 +398,8 @@ class Class:
                     rmvd.append(id)
                     # register the unlink with the old linked node
                     if self.do_journal and prop.do_journal:
-                        self.db.addjournal(link_class, id, _UNLINK, (self.classname, str(row.id), key))
+                        self.db.addjournal(link_class, id, _UNLINK,
+                            (self.classname, str(row.id), key))
 
                 # handle additions
                 adds = []
@@ -406,7 +411,8 @@ class Class:
                         adds.append(id)
                         # register the link with the newly linked node
                         if self.do_journal and prop.do_journal:
-                            self.db.addjournal(link_class, id, _LINK, (self.classname, str(row.id), key))
+                            self.db.addjournal(link_class, id, _LINK,
+                                (self.classname, str(row.id), key))
 
                 sv = getattr(row, key)
                 i = 0
@@ -421,7 +427,6 @@ class Class:
 
                 if not rmvd and not adds:
                     del propvalues[key]
-
             elif isinstance(prop, hyperdb.String):
                 if value is not None and type(value) != _STRINGTYPE:
                     raise TypeError, 'new property "%s" not a string'%key
@@ -429,8 +434,9 @@ class Class:
                 changes[key] = oldvalue
                 if hasattr(prop, 'isfilename') and prop.isfilename:
                     propvalues[key] = os.path.basename(value)
-                if prop.indexme:
-                    self.db.indexer.add_text((self.classname, nodeid, key), value, 'text/plain')
+                if prop.indexme and value is not None:
+                    self.db.indexer.add_text((self.classname, nodeid, key),
+                        value, 'text/plain')
 
             elif isinstance(prop, hyperdb.Password):
                 if not isinstance(value, password.Password):
@@ -1067,6 +1073,7 @@ class Indexer(indexer.Indexer):
         return self._getprops(classname).index(propname)
     def _getpropname(self, classname, propid):
         return self._getprops(classname)[propid]
+
     def add_text(self, identifier, text, mime_type='text/plain'):
         if mime_type != 'text/plain':
             return
@@ -1096,6 +1103,7 @@
             if len(hits)==0 or hits.find(pos=pos) < 0:
                 hits.append(pos=pos)
                 self.changed = 1
+
     def find(self, wordlist):
         hits = None
         index = self.db.view('index').ordered(1)
diff --git a/test/test_db.py b/test/test_db.py
index a00a9d3..ed5d718 100644
--- a/test/test_db.py
+++ b/test/test_db.py
@@ -15,7 +15,7 @@
 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 #
-# $Id: test_db.py,v 1.45 2002-09-12 04:21:20 richard Exp $
+# $Id: test_db.py,v 1.46 2002-09-13 08:20:13 richard Exp $
 
 import unittest, os, shutil, time
 
@@ -80,10 +80,15 @@ class anydbmDBTestCase(MyTestCase):
         setupSchema(self.db2, 0, anydbm)
 
     def testStringChange(self):
+        # test set & retrieve
         self.db.issue.create(title="spam", status='1')
         self.assertEqual(self.db.issue.get('1', 'title'), 'spam')
+
+        # change and make sure we retrieve the correct value
         self.db.issue.set('1', title='eggs')
         self.assertEqual(self.db.issue.get('1', 'title'), 'eggs')
+
+        # do some commit stuff
         self.db.commit()
         self.assertEqual(self.db.issue.get('1', 'title'), 'eggs')
         self.db.issue.create(title="spam", status='1')
@@ -93,6 +98,8 @@ class anydbmDBTestCase(MyTestCase):
         self.assertEqual(self.db.issue.get('2', 'title'), 'ham')
         self.db.commit()
         self.assertEqual(self.db.issue.get('2', 'title'), 'ham')
+
+        # make sure we can unset
         self.db.issue.set('1', title=None)
         self.assertEqual(self.db.issue.get('1', "title"), None)
 
@@ -598,8 +605,13 @@ class metakitDBTestCase(anydbmDBTestCase):
             os.makedirs(config.DATABASE + '/files')
         self.db = metakit.Database(config, 'test')
         setupSchema(self.db, 1, metakit)
-        self.db2 = metakit.Database(config, 'test')
-        setupSchema(self.db2, 0, metakit)
+        #self.db2 = metakit.Database(config, 'test')
+        #setupSchema(self.db2, 0, metakit)
+
+    def testIDGeneration(self):
+        id1 = self.db.issue.create(title="spam", status='1')
+        id2 = self.db.issue.create(title="eggs", status='2')
+        self.assertNotEqual(id1, id2)
 
     def testTransactions(self):
         # remember the number of items we started
@@ -639,8 +651,8 @@ class metakitReadOnlyDBTestCase(anydbmReadOnlyDBTestCase):
             setupSchema(db, 1, metakit)
         self.db = metakit.Database(config)
         setupSchema(self.db, 0, metakit)
-        self.db2 = metakit.Database(config, 'test')
-        setupSchema(self.db2, 0, metakit)
+#        self.db2 = metakit.Database(config, 'test')
+#        setupSchema(self.db2, 0, metakit)
 
 def suite():
     l = [