summary | shortlog | log | commit | commitdiff | tree
raw | patch | inline | side by side (parent: 52c553f)
author | richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2> | |
Fri, 13 Sep 2002 08:20:13 +0000 (08:20 +0000) | ||
committer | richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2> | |
Fri, 13 Sep 2002 08:20:13 +0000 (08:20 +0000) |
. handling of None for Date/Interval/Password values in export/import
. handling of journal values in export/import
Also played with metakit backend some, fixing some of the unit tests
breakages. Hrm.
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1166 57a73879-2fb5-44c3-a270-3262357dd7e2
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1166 57a73879-2fb5-44c3-a270-3262357dd7e2
diff --git a/CHANGES.txt b/CHANGES.txt
index 343e51eb45e20b155df6be615c8cc801b9450da4..db67d4486c017305f4dac326d08102f4aa585e79 100644 (file)
--- a/CHANGES.txt
+++ b/CHANGES.txt
This file contains the changes to the Roundup system over time. The entries
are given with the most recent entry first.
+2002-09-?? 0.5.0 ????
+ . handling of None for Date/Interval/Password values in export/import
+ . handling of journal values in export/import
+
2002-09-13 0.5.0 beta2
. all backends now have a .close() method, and it's used everywhere
. fixed bug in detectors __init__
index 17210441957189786efa97dbd67cb9fd03813dad..f36213d0ecab9f70b8d3ca937793c68405212493 100644 (file)
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-#$Id: back_anydbm.py,v 1.77 2002-09-12 07:23:23 richard Exp $
+#$Id: back_anydbm.py,v 1.78 2002-09-13 08:20:07 richard Exp $
'''
This module defines a backend that saves the hyperdatabase in a database
chosen by anydbm. It is guaranteed to always be available in python
if os.path.exists(path):
db_type = whichdb.whichdb(path)
if not db_type:
- raise hyperdb.DatabaseError, "Couldn't identify database type"
+ raise DatabaseError, "Couldn't identify database type"
elif os.path.exists(path+'.db'):
# if the path ends in '.db', it's a dbm database, whether
# anydbm says it's dbhash or not!
try:
dbm = __import__(db_type)
except ImportError:
- raise hyperdb.DatabaseError, \
+ raise DatabaseError, \
"Couldn't open database - the required module '%s'"\
" is not available"%db_type
if __debug__:
#
# Journal
#
- def addjournal(self, classname, nodeid, action, params):
+ def addjournal(self, classname, nodeid, action, params, creator=None,
+ creation=None):
''' Journal the Action
'action' may be:
'''
if __debug__:
print >>hyperdb.DEBUG, 'addjournal', (self, classname, nodeid,
- action, params)
+ action, params, creator, creation)
self.transactions.append((self.doSaveJournal, (classname, nodeid,
- action, params)))
+ action, params, creator, creation)))
def getjournal(self, classname, nodeid):
''' get the journal for id
self.databases[db_name] = self.opendb(db_name, 'c')
return self.databases[db_name]
- def doSaveJournal(self, classname, nodeid, action, params):
- # handle supply of the special journalling parameters (usually
- # supplied on importing an existing database)
+ def doSaveJournal(self, classname, nodeid, action, params, creator,
+ creation):
+ # serialise the parameters now if necessary
if isinstance(params, type({})):
- if params.has_key('creator'):
- journaltag = self.user.get(params['creator'], 'username')
- del params['creator']
- else:
- journaltag = self.journaltag
- if params.has_key('created'):
- journaldate = params['created'].serialise()
- del params['created']
- else:
- journaldate = date.Date().serialise()
- if params.has_key('activity'):
- del params['activity']
-
- # serialise the parameters now
if action in ('set', 'create'):
params = self.serialise(classname, params)
+
+ # handle supply of the special journalling parameters (usually
+ # supplied on importing an existing database)
+ if creator:
+ journaltag = creator
else:
journaltag = self.journaltag
+ if creation:
+ journaldate = creation.serialise()
+ else:
journaldate = date.Date().serialise()
# create the journal entry
proptype = properties[prop]
value = self.get(nodeid, prop)
# "marshal" data where needed
- if isinstance(proptype, hyperdb.Date):
+ if value is None:
+ pass
+ elif isinstance(proptype, hyperdb.Date):
value = value.get_tuple()
elif isinstance(proptype, hyperdb.Interval):
value = value.get_tuple()
if propname == 'id':
newid = value
continue
+ elif value is None:
+ # don't set Nones
+ continue
elif isinstance(prop, hyperdb.Date):
value = date.Date(value)
elif isinstance(prop, hyperdb.Interval):
pwd = password.Password()
pwd.unpack(value)
value = pwd
- if value is not None:
- d[propname] = value
+ d[propname] = value
+
+ # extract the extraneous journalling gumpf and nuke it
+ if d.has_key('creator'):
+ creator = d['creator']
+ del d['creator']
+ else:
+ creator = None
+ if d.has_key('creation'):
+ creation = d['creation']
+ del d['creation']
+ else:
+ creation = None
+ if d.has_key('activity'):
+ del d['activity']
- # add
+ # add the node and journal
self.db.addnode(self.classname, newid, d)
- self.db.addjournal(self.classname, newid, 'create', d)
+ self.db.addjournal(self.classname, newid, 'create', d, creator,
+ creation)
return newid
def get(self, nodeid, propname, default=_marker, cache=1):
# extract the "content" property from the proplist
i = propnames.index('content')
- content = proplist[i]
+ content = eval(proplist[i])
del propnames[i]
del proplist[i]
index 9ffd906821caf64593ea9d939ac34aabc8ddd056..6f7aa9d7a15dd5a153282e3c9185b7878cab6109 100644 (file)
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-#$Id: back_bsddb.py,v 1.23 2002-09-10 00:11:50 richard Exp $
+#$Id: back_bsddb.py,v 1.24 2002-09-13 08:20:11 richard Exp $
'''
This module defines a backend that saves the hyperdatabase in BSDDB.
'''
self.databases[db_name] = db
return db
- def doSaveJournal(self, classname, nodeid, action, params):
- # serialise first
- if action in ('set', 'create'):
- params = self.serialise(classname, params)
-
- entry = (nodeid, date.Date().get_tuple(), self.journaltag, action,
- params)
-
- if __debug__:
- print >>hyperdb.DEBUG, 'doSaveJournal', entry
-
- db = self.getCachedJournalDB(classname)
-
- if db.has_key(nodeid):
- s = db[nodeid]
- l = marshal.loads(s)
- l.append(entry)
- else:
- l = [entry]
-
- db[nodeid] = marshal.dumps(l)
index 0a701950871ebeb7e363fd0f9b9f86750f8123e5..be4d80d8a13e1d5ab746729f8dd0c2aa1c05f4ba 100644 (file)
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-#$Id: back_bsddb3.py,v 1.16 2002-09-10 00:11:50 richard Exp $
+#$Id: back_bsddb3.py,v 1.17 2002-09-13 08:20:12 richard Exp $
import bsddb3, os, marshal
from roundup import hyperdb, date
self.databases[db_name] = db
return db
- def doSaveJournal(self, classname, nodeid, action, params):
- # serialise first
- if action in ('set', 'create'):
- params = self.serialise(classname, params)
-
- entry = (nodeid, date.Date().get_tuple(), self.journaltag, action,
- params)
-
- if __debug__:
- print >>hyperdb.DEBUG, 'doSaveJournal', entry
-
- db = self.getCachedJournalDB(classname)
-
- if db.has_key(nodeid):
- s = db[nodeid]
- l = marshal.loads(s)
- l.append(entry)
- else:
- l = [entry]
-
- db[nodeid] = marshal.dumps(l)
index a4add917211a5fd5976ba184a06da0d3297289f6..b6579bf98b9aaacbb5d800d4ef377c2a9b386770 100644 (file)
-# $Id: back_gadfly.py,v 1.18 2002-09-12 07:23:23 richard Exp $
+# $Id: back_gadfly.py,v 1.19 2002-09-13 08:20:13 richard Exp $
__doc__ = '''
About Gadfly
============
cursor.execute(sql, (retired,))
return [x[0] for x in cursor.fetchall()]
- def addjournal(self, classname, nodeid, action, params):
+ def addjournal(self, classname, nodeid, action, params, creator=None,
+ creation=None):
''' Journal the Action
'action' may be:
'link' or 'unlink' -- 'params' is (classname, nodeid, propname)
'retire' -- 'params' is None
'''
+ # serialise the parameters now if necessary
if isinstance(params, type({})):
- if params.has_key('creator'):
- journaltag = self.user.get(params['creator'], 'username')
- del params['creator']
- else:
- journaltag = self.journaltag
- if params.has_key('created'):
- journaldate = params['created'].serialise()
- del params['created']
- else:
- journaldate = date.Date().serialise()
- if params.has_key('activity'):
- del params['activity']
-
- # serialise the parameters now
if action in ('set', 'create'):
params = self.serialise(classname, params)
+
+ # handle supply of the special journalling parameters (usually
+ # supplied on importing an existing database)
+ if creator:
+ journaltag = creator
else:
journaltag = self.journaltag
+ if creation:
+ journaldate = creation.serialise()
+ else:
journaldate = date.Date().serialise()
# create the journal entry
entry = (nodeid, journaldate, journaltag, action, params)
if __debug__:
- print >>hyperdb.DEBUG, 'doSaveJournal', entry
+ print >>hyperdb.DEBUG, 'addjournal', entry
# do the insert
cursor = self.conn.cursor()
return newid
+ def export_list(self, propnames, nodeid):
+ ''' Export a node - generate a list of CSV-able data in the order
+ specified by propnames for the given node.
+ '''
+ properties = self.getprops()
+ l = []
+ for prop in propnames:
+ proptype = properties[prop]
+ value = self.get(nodeid, prop)
+ # "marshal" data where needed
+ if value is None:
+ pass
+ elif isinstance(proptype, hyperdb.Date):
+ value = value.get_tuple()
+ elif isinstance(proptype, hyperdb.Interval):
+ value = value.get_tuple()
+ elif isinstance(proptype, hyperdb.Password):
+ value = str(value)
+ l.append(repr(value))
+ return l
+
+ def import_list(self, propnames, proplist):
+ ''' Import a node - all information including "id" is present and
+ should not be sanity checked. Triggers are not triggered. The
+ journal should be initialised using the "creator" and "created"
+ information.
+
+ Return the nodeid of the node imported.
+ '''
+ if self.db.journaltag is None:
+ raise DatabaseError, 'Database open read-only'
+ properties = self.getprops()
+
+ # make the new node's property map
+ d = {}
+ for i in range(len(propnames)):
+ # Use eval to reverse the repr() used to output the CSV
+ value = eval(proplist[i])
+
+ # Figure the property for this column
+ propname = propnames[i]
+ prop = properties[propname]
+
+ # "unmarshal" where necessary
+ if propname == 'id':
+ newid = value
+ continue
+ elif value is None:
+ # don't set Nones
+ continue
+ elif isinstance(prop, hyperdb.Date):
+ value = date.Date(value)
+ elif isinstance(prop, hyperdb.Interval):
+ value = date.Interval(value)
+ elif isinstance(prop, hyperdb.Password):
+ pwd = password.Password()
+ pwd.unpack(value)
+ value = pwd
+ d[propname] = value
+
+ # extract the extraneous journalling gumpf and nuke it
+ if d.has_key('creator'):
+ creator = d['creator']
+ del d['creator']
+ if d.has_key('creation'):
+ creation = d['creation']
+ del d['creation']
+ if d.has_key('activity'):
+ del d['activity']
+
+ # add the node and journal
+ self.db.addnode(self.classname, newid, d)
+ self.db.addjournal(self.classname, newid, 'create', d, creator,
+ creation)
+ return newid
+
_marker = []
def get(self, nodeid, propname, default=_marker, cache=1):
'''Get the value of a property on an existing node of this class.
# extract the "content" property from the proplist
i = propnames.index('content')
- content = proplist[i]
+ content = eval(proplist[i])
del propnames[i]
del proplist[i]
index 1c562ab6396c02f3cc5f2617c172927730d38796..c7592a4cd13da87c1ec6b9e70ec17bab8d5f756f 100755 (executable)
for cl in self.classes.values():
cl.db = None
self._db = None
- locking.release_lock(self.lockfile)
- del _dbs[self.config.DATABASE]
- self.lockfile.close()
+ if self.lockfile is not None:
+ locking.release_lock(self.lockfile)
+ if _dbs.has_key(self.config.DATABASE):
+ del _dbs[self.config.DATABASE]
+ if self.lockfile is not None:
+ self.lockfile.close()
+ self.lockfile = None
self.classes = {}
self.indexer = None
raise IndexError, "%s has no node %s" % (self.classname, nodeid)
oldnode = self.uncommitted.setdefault(id, {})
changes = {}
-
+
for key, value in propvalues.items():
# this will raise the KeyError if the property isn't valid
# ... we don't use getprops() here because we only care about
rmvd.append(id)
# register the unlink with the old linked node
if self.do_journal and prop.do_journal:
- self.db.addjournal(link_class, id, _UNLINK, (self.classname, str(row.id), key))
+ self.db.addjournal(link_class, id, _UNLINK,
+ (self.classname, str(row.id), key))
# handle additions
adds = []
adds.append(id)
# register the link with the newly linked node
if self.do_journal and prop.do_journal:
- self.db.addjournal(link_class, id, _LINK, (self.classname, str(row.id), key))
+ self.db.addjournal(link_class, id, _LINK,
+ (self.classname, str(row.id), key))
sv = getattr(row, key)
i = 0
if not rmvd and not adds:
del propvalues[key]
-
elif isinstance(prop, hyperdb.String):
if value is not None and type(value) != _STRINGTYPE:
raise TypeError, 'new property "%s" not a string'%key
changes[key] = oldvalue
if hasattr(prop, 'isfilename') and prop.isfilename:
propvalues[key] = os.path.basename(value)
- if prop.indexme:
- self.db.indexer.add_text((self.classname, nodeid, key), value, 'text/plain')
+ if prop.indexme and value is not None:
+ self.db.indexer.add_text((self.classname, nodeid, key),
+ value, 'text/plain')
elif isinstance(prop, hyperdb.Password):
if not isinstance(value, password.Password):
return self._getprops(classname).index(propname)
def _getpropname(self, classname, propid):
return self._getprops(classname)[propid]
+
def add_text(self, identifier, text, mime_type='text/plain'):
if mime_type != 'text/plain':
return
if len(hits)==0 or hits.find(pos=pos) < 0:
hits.append(pos=pos)
self.changed = 1
+
def find(self, wordlist):
hits = None
index = self.db.view('index').ordered(1)
diff --git a/test/test_db.py b/test/test_db.py
index a00a9d341b5db4876765cf89e1a1a07146b9aa25..ed5d718e9b201b11f31ad589c0562c912a90f667 100644 (file)
--- a/test/test_db.py
+++ b/test/test_db.py
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: test_db.py,v 1.45 2002-09-12 04:21:20 richard Exp $
+# $Id: test_db.py,v 1.46 2002-09-13 08:20:13 richard Exp $
import unittest, os, shutil, time
setupSchema(self.db2, 0, anydbm)
def testStringChange(self):
+ # test set & retrieve
self.db.issue.create(title="spam", status='1')
self.assertEqual(self.db.issue.get('1', 'title'), 'spam')
+
+ # change and make sure we retrieve the correct value
self.db.issue.set('1', title='eggs')
self.assertEqual(self.db.issue.get('1', 'title'), 'eggs')
+
+ # do some commit stuff
self.db.commit()
self.assertEqual(self.db.issue.get('1', 'title'), 'eggs')
self.db.issue.create(title="spam", status='1')
self.assertEqual(self.db.issue.get('2', 'title'), 'ham')
self.db.commit()
self.assertEqual(self.db.issue.get('2', 'title'), 'ham')
+
+ # make sure we can unset
self.db.issue.set('1', title=None)
self.assertEqual(self.db.issue.get('1', "title"), None)
os.makedirs(config.DATABASE + '/files')
self.db = metakit.Database(config, 'test')
setupSchema(self.db, 1, metakit)
- self.db2 = metakit.Database(config, 'test')
- setupSchema(self.db2, 0, metakit)
+ #self.db2 = metakit.Database(config, 'test')
+ #setupSchema(self.db2, 0, metakit)
+
+ def testIDGeneration(self):
+ id1 = self.db.issue.create(title="spam", status='1')
+ id2 = self.db.issue.create(title="eggs", status='2')
+ self.assertNotEqual(id1, id2)
def testTransactions(self):
# remember the number of items we started
setupSchema(db, 1, metakit)
self.db = metakit.Database(config)
setupSchema(self.db, 0, metakit)
- self.db2 = metakit.Database(config, 'test')
- setupSchema(self.db2, 0, metakit)
+# self.db2 = metakit.Database(config, 'test')
+# setupSchema(self.db2, 0, metakit)
def suite():
l = [