index 3c29f4f5d42aa151644d7f0719091b00d4aaf247..cf3f59f99c594947f43e5771f2d5deb94f7c2093 100755 (executable)
-from roundup import hyperdb, date, password, roundupdb
+# $Id: back_metakit.py,v 1.62 2004-03-18 01:58:45 richard Exp $
+'''Metakit backend for Roundup, originally by Gordon McMillan.
+
+Known Current Bugs:
+
+- You can't change a class' key properly. This shouldn't be too hard to fix.
+- Some unit tests are overridden.
+
+Notes by Richard:
+
+This backend has some behaviour specific to metakit:
+
+- there's no concept of an explicit "unset" in metakit, so all types
+ have some "unset" value:
+
+ ========= ===== ======================================================
+ Type Value Action when fetching from mk
+ ========= ===== ======================================================
+ Strings '' convert to None
+ Date 0 (seconds since 1970-01-01.00:00:00) convert to None
+ Interval '' convert to None
+ Number 0 ambiguous :( - do nothing (see BACKWARDS_COMPATIBLE)
+ Boolean 0 ambiguous :( - do nothing (see BACKWARDS_COMPATIBLE)
+ Link 0 convert to None
+ Multilink [] actually, mk can handle this one ;)
+ Password '' convert to None
+ ========= ===== ======================================================
+
+ The get/set routines handle these values accordingly by converting
+ to/from None where they can. The Number/Boolean types are not able
+ to handle an "unset" at all, so they default the "unset" to 0.
+- Metakit relies on reference counting to close the database, there is
+ no explicit close call. This can cause issues if a metakit
+ database is referenced multiple times, one might not actually be
+ closing the db.
+- probably a bunch of stuff that I'm not aware of yet because I haven't
+ fully read through the source. One of these days....
+'''
+__docformat__ = 'restructuredtext'
+# Set this flag to False to break backwards compatibility (i.e. can't read old
+# databases) but comply with more roundup features, like adding NULL support.
+BACKWARDS_COMPATIBLE = True
+
+from roundup import hyperdb, date, password, roundupdb, security
import metakit
-import re, marshal, os, sys, weakref, time, calendar
-from roundup.indexer import Indexer
+from sessions_dbm import Sessions, OneTimeKeys
+import re, marshal, os, sys, time, calendar
+from roundup import indexer
+import locking
+from roundup.date import Range
-_instances = weakref.WeakValueDictionary()
+# view modes for opening
+# XXX FIXME BPK -> these don't do anything, they are ignored
+# should we just get rid of them for simplicities sake?
+READ = 0
+READWRITE = 1
+
+# general metakit error
+class MKBackendError(Exception):
+ pass
+
+_dbs = {}
def Database(config, journaltag=None):
- if _instances.has_key(id(config)):
- db = _instances[id(config)]
- old = db.journaltag
- db.journaltag = journaltag
- if hasattr(db, 'curuserid'):
- delattr(db, 'curuserid')
- return db
- else:
+ ''' Only have a single instance of the Database class for each database
+ '''
+ db = _dbs.get(config.DATABASE, None)
+ if db is None or db._db is None:
db = _Database(config, journaltag)
- _instances[id(config)] = db
- return db
+ _dbs[config.DATABASE] = db
+ else:
+ db.journaltag = journaltag
+ return db
-class _Database(hyperdb.Database):
+class _Database(hyperdb.Database, roundupdb.Database):
def __init__(self, config, journaltag=None):
self.config = config
self.journaltag = journaltag
self.classes = {}
- self._classes = []
self.dirty = 0
- self.__RW = 0
+ self.lockfile = None
self._db = self.__open()
- self.indexer = Indexer(self.config.DATABASE)
+ self.indexer = Indexer(self.config.DATABASE, self._db)
+ self.security = security.Security(self)
+
os.umask(0002)
+
def post_init(self):
if self.indexer.should_reindex():
self.reindex()
+ def refresh_database(self):
+ # XXX handle refresh
+ self.reindex()
+
def reindex(self):
for klass in self.classes.values():
for nodeid in klass.list():
klass.index(nodeid)
self.indexer.save_index()
-
-
+
+ def getSessionManager(self):
+ return Sessions(self)
+
+ def getOTKManager(self):
+ return OneTimeKeys(self)
+
# --- defined in ping's spec
def __getattr__(self, classname):
- if classname == 'curuserid':
- try:
- self.curuserid = x = int(self.classes['user'].lookup(self.journaltag))
- except KeyError:
- x = 0
- return x
- return self.getclass(classname)
+ if classname == 'transactions':
+ return self.dirty
+ # fall back on the classes
+ try:
+ return self.getclass(classname)
+ except KeyError, msg:
+ # KeyError's not appropriate here
+ raise AttributeError, str(msg)
def getclass(self, classname):
- return self.classes[classname]
+ try:
+ return self.classes[classname]
+ except KeyError:
+ raise KeyError, 'There is no class called "%s"'%classname
def getclasses(self):
return self.classes.keys()
# --- end of ping's spec
+
# --- exposed methods
def commit(self):
+ '''commit all changes to the database'''
if self.dirty:
- if self.__RW:
- self._db.commit()
- for cl in self.classes.values():
- cl._commit()
- self.indexer.save_index()
- else:
- raise RuntimeError, "metakit is open RO"
+ self._db.commit()
+ for cl in self.classes.values():
+ cl._commit()
+ self.indexer.save_index()
self.dirty = 0
def rollback(self):
+ '''roll back all changes since the last commit'''
if self.dirty:
for cl in self.classes.values():
cl._rollback()
self._db.rollback()
+ self._db = None
+ self._db = metakit.storage(self.dbnm, 1)
+ self.hist = self._db.view('history')
+ self.tables = self._db.view('tables')
+ self.indexer.rollback()
+ self.indexer.datadb = self._db
self.dirty = 0
+ def clearCache(self):
+ '''clear the internal cache by committing all pending database changes'''
+ for cl in self.classes.values():
+ cl._commit()
def clear(self):
+ '''clear the internal cache but don't commit any changes'''
for cl in self.classes.values():
cl._clear()
def hasnode(self, classname, nodeid):
- return self.getclass(clasname).hasnode(nodeid)
+ '''does a particular class contain a nodeid?'''
+ return self.getclass(classname).hasnode(nodeid)
def pack(self, pack_before):
- pass
+ ''' Delete all journal entries except "create" before 'pack_before'.
+ '''
+ mindate = int(calendar.timegm(pack_before.get_tuple()))
+ i = 0
+ while i < len(self.hist):
+ if self.hist[i].date < mindate and self.hist[i].action != _CREATE:
+ self.hist.delete(i)
+ else:
+ i = i + 1
def addclass(self, cl):
+ ''' Add a Class to the hyperdatabase.
+ '''
self.classes[cl.classname] = cl
- def addjournal(self, tablenm, nodeid, action, params):
+ if self.tables.find(name=cl.classname) < 0:
+ self.tables.append(name=cl.classname)
+
+ # add default Edit and View permissions
+ self.security.addPermission(name="Edit", klass=cl.classname,
+ description="User is allowed to edit "+cl.classname)
+ self.security.addPermission(name="View", klass=cl.classname,
+ description="User is allowed to access "+cl.classname)
+
+ def addjournal(self, tablenm, nodeid, action, params, creator=None,
+ creation=None):
+ ''' Journal the Action
+ 'action' may be:
+
+ 'create' or 'set' -- 'params' is a dictionary of property values
+ 'link' or 'unlink' -- 'params' is (classname, nodeid, propname)
+ 'retire' -- 'params' is None
+ '''
tblid = self.tables.find(name=tablenm)
if tblid == -1:
tblid = self.tables.append(name=tablenm)
+ if creator is None:
+ creator = int(self.getuid())
+ else:
+ try:
+ creator = int(creator)
+ except TypeError:
+ creator = int(self.getclass('user').lookup(creator))
+ if creation is None:
+ creation = int(time.time())
+ elif isinstance(creation, date.Date):
+ creation = int(calendar.timegm(creation.get_tuple()))
# tableid:I,nodeid:I,date:I,user:I,action:I,params:B
self.hist.append(tableid=tblid,
nodeid=int(nodeid),
- date=int(time.time()),
+ date=creation,
action=action,
- user = self.curuserid,
+ user = creator,
params = marshal.dumps(params))
- def gethistory(self, tablenm, nodeid):
+ def getjournal(self, tablenm, nodeid):
+ ''' get the journal for id
+ '''
rslt = []
tblid = self.tables.find(name=tablenm)
if tblid == -1:
return rslt
q = self.hist.select(tableid=tblid, nodeid=int(nodeid))
+ if len(q) == 0:
+ raise IndexError, "no history for id %s in %s" % (nodeid, tablenm)
i = 0
- userclass = self.getclass('user')
+ #userclass = self.getclass('user')
for row in q:
try:
params = marshal.loads(row.params)
except ValueError:
print "history couldn't unmarshal %r" % row.params
params = {}
- usernm = userclass.get(str(row.user), 'username')
+ #usernm = userclass.get(str(row.user), 'username')
dt = date.Date(time.gmtime(row.date))
- rslt.append((i, dt, usernm, _actionnames[row.action], params))
- i += 1
+ #rslt.append((nodeid, dt, usernm, _actionnames[row.action], params))
+ rslt.append((nodeid, dt, str(row.user), _actionnames[row.action],
+ params))
return rslt
-
+
+ def destroyjournal(self, tablenm, nodeid):
+ nodeid = int(nodeid)
+ tblid = self.tables.find(name=tablenm)
+ if tblid == -1:
+ return
+ i = 0
+ hist = self.hist
+ while i < len(hist):
+ if hist[i].tableid == tblid and hist[i].nodeid == nodeid:
+ hist.delete(i)
+ else:
+ i = i + 1
+ self.dirty = 1
+
def close(self):
- import time
- now = time.time
- start = now()
+ ''' Close off the connection.
+ '''
+ # de-reference count the metakit databases,
+ # as this is the only way they will be closed
for cl in self.classes.values():
cl.db = None
- #self._db.rollback()
- #print "pre-close cleanup of DB(%d) took %2.2f secs" % (self.__RW, now()-start)
self._db = None
- #print "close of DB(%d) took %2.2f secs" % (self.__RW, now()-start)
+ if self.lockfile is not None:
+ locking.release_lock(self.lockfile)
+ if _dbs.has_key(self.config.DATABASE):
+ del _dbs[self.config.DATABASE]
+ if self.lockfile is not None:
+ self.lockfile.close()
+ self.lockfile = None
self.classes = {}
- try:
- del _instances[id(self.config)]
- except KeyError:
- pass
- self.__RW = 0
-
+
+ # force the indexer to close
+ self.indexer.close()
+ self.indexer = None
+
# --- internal
def __open(self):
+ ''' Open the metakit database
+ '''
+ # make the database dir if it doesn't exist
+ if not os.path.exists(self.config.DATABASE):
+ os.makedirs(self.config.DATABASE)
+
+ # figure the file names
self.dbnm = db = os.path.join(self.config.DATABASE, 'tracker.mk4')
+ lockfilenm = db[:-3]+'lck'
+
+ # get the database lock
+ self.lockfile = locking.acquire_lock(lockfilenm)
+ self.lockfile.write(str(os.getpid()))
+ self.lockfile.flush()
+
+ # see if the schema has changed since last db access
self.fastopen = 0
if os.path.exists(db):
dbtm = os.path.getmtime(db)
else:
# can't find schemamod - must be frozen
self.fastopen = 1
- else:
- self.__RW = 1
- if not self.fastopen:
- self.__RW = 1
- db = metakit.storage(db, self.__RW)
+
+ # open the db
+ db = metakit.storage(db, 1)
hist = db.view('history')
tables = db.view('tables')
if not self.fastopen:
+ # create the database if it's brand new
if not hist.structure():
hist = db.getas('history[tableid:I,nodeid:I,date:I,user:I,action:I,params:B]')
if not tables.structure():
tables = db.getas('tables[name:S]')
+ db.commit()
+
+ # we now have an open, initialised database
self.tables = tables
self.hist = hist
return db
- def isReadOnly(self):
- return self.__RW == 0
- def getWriteAccess(self):
- if self.journaltag is not None and self.__RW == 0:
- now = time.time
- start = now()
- self._db = None
- #print "closing the file took %2.2f secs" % (now()-start)
- start = now()
- self._db = metakit.storage(self.dbnm, 1)
- self.__RW = 1
- self.hist = self._db.view('history')
- self.tables = self._db.view('tables')
- #print "getting RW access took %2.2f secs" % (now()-start)
-
+
+ def setid(self, classname, maxid):
+ ''' No-op in metakit
+ '''
+ pass
+
_STRINGTYPE = type('')
_LISTTYPE = type([])
-_CREATE, _SET, _RETIRE, _LINK, _UNLINK = range(5)
+_CREATE, _SET, _RETIRE, _LINK, _UNLINK, _RESTORE = range(6)
_actionnames = {
_CREATE : 'create',
_SET : 'set',
_RETIRE : 'retire',
+ _RESTORE : 'restore',
_LINK : 'link',
_UNLINK : 'unlink',
}
_ALLOWSETTINGPRIVATEPROPS = 0
-class Class: # no, I'm not going to subclass the existing!
+class Class(hyperdb.Class):
+ ''' The handle to a particular class of nodes in a hyperdatabase.
+
+ All methods except __repr__ and getnode must be implemented by a
+ concrete backend Class of which this is one.
+ '''
+
privateprops = None
def __init__(self, db, classname, **properties):
- self.db = weakref.proxy(db)
+ if (properties.has_key('creation') or properties.has_key('activity')
+ or properties.has_key('creator')):
+ raise ValueError, '"creation", "activity" and "creator" are '\
+ 'reserved'
+ if hasattr(db, classname):
+ raise ValueError, "Class %s already exists"%classname
+
+ self.db = db
self.classname = classname
- self.keyname = None
+ self.key = None
self.ruprops = properties
self.privateprops = { 'id' : hyperdb.String(),
'activity' : hyperdb.Date(),
'creation' : hyperdb.Date(),
'creator' : hyperdb.Link('user') }
- self.auditors = {'create': [], 'set': [], 'retire': []} # event -> list of callables
- self.reactors = {'create': [], 'set': [], 'retire': []} # ditto
+
+ # event -> list of callables
+ self.auditors = {'create': [], 'set': [], 'retire': [], 'restore': []}
+ self.reactors = {'create': [], 'set': [], 'retire': [], 'restore': []}
+
view = self.__getview()
self.maxid = 1
if view:
self.maxid = view[-1].id + 1
self.uncommitted = {}
self.rbactions = []
+
# people reach inside!!
self.properties = self.ruprops
self.db.addclass(self)
self.idcache = {}
+
+ # default is to journal changes
+ self.do_journal = 1
+
+ def enableJournalling(self):
+ '''Turn journalling on for this class
+ '''
+ self.do_journal = 1
+
+ def disableJournalling(self):
+ '''Turn journalling off for this class
+ '''
+ self.do_journal = 0
- # --- the roundup.Class methods
+ #
+ # Detector/reactor interface
+ #
def audit(self, event, detector):
+ '''Register a detector
+ '''
l = self.auditors[event]
if detector not in l:
self.auditors[event].append(detector)
+
def fireAuditors(self, action, nodeid, newvalues):
- for audit in self.auditors[action]:
+ '''Fire all registered auditors.
+ '''
+ for audit in self.auditors[action]:
audit(self.db, self, nodeid, newvalues)
+
+ def react(self, event, detector):
+ '''Register a reactor
+ '''
+ l = self.reactors[event]
+ if detector not in l:
+ self.reactors[event].append(detector)
+
def fireReactors(self, action, nodeid, oldvalues):
+ '''Fire all registered reactors.
+ '''
for react in self.reactors[action]:
react(self.db, self, nodeid, oldvalues)
- def react(self, event, detector):
- l = self.reactors[event]
- if detector not in l:
- self.reactors[event].append(detector)
+
# --- the hyperdb.Class methods
def create(self, **propvalues):
- rowdict = {}
- rowdict['id'] = newid = self.maxid
- self.maxid += 1
- ndx = self.getview(1).append(rowdict)
- propvalues['#ISNEW'] = 1
- try:
- self.set(str(newid), **propvalues)
- except Exception:
- self.maxid -= 1
- raise
- return str(newid)
+ ''' Create a new node of this class and return its id.
+
+ The keyword arguments in 'propvalues' map property names to values.
+
+ The values of arguments must be acceptable for the types of their
+ corresponding properties or a TypeError is raised.
+
+ If this class has a key property, it must be present and its value
+ must not collide with other key strings or a ValueError is raised.
+
+ Any other properties on this class that are missing from the
+ 'propvalues' dictionary are set to None.
+
+ If an id in a link or multilink property does not refer to a valid
+ node, an IndexError is raised.
+ '''
+ if not propvalues:
+ raise ValueError, "Need something to create!"
+ self.fireAuditors('create', None, propvalues)
+ newid = self.create_inner(**propvalues)
+ # self.set() (called in self.create_inner()) fires the reactors
+ return newid
+
+ def create_inner(self, **propvalues):
+ ''' Called by create, in-between the audit and react calls.
+ '''
+ rowdict = {}
+ rowdict['id'] = newid = self.maxid
+ self.maxid += 1
+ ndx = self.getview(READWRITE).append(rowdict)
+ propvalues['#ISNEW'] = 1
+ try:
+ self.set(str(newid), **propvalues)
+ except Exception:
+ self.maxid -= 1
+ raise
+ return str(newid)
def get(self, nodeid, propname, default=_marker, cache=1):
- # default and cache aren't in the spec
- # cache=0 means "original value"
+ '''Get the value of a property on an existing node of this class.
+
+ 'nodeid' must be the id of an existing node of this class or an
+ IndexError is raised. 'propname' must be the name of a property
+ of this class or a KeyError is raised.
- view = self.getview()
+ 'cache' exists for backwards compatibility; cache=0 returns the original
+ (pre-change) value where one is pending in self.uncommitted.
+ '''
+ view = self.getview()
id = int(nodeid)
if cache == 0:
oldnode = self.uncommitted.get(id, None)
if oldnode and oldnode.has_key(propname):
- return oldnode[propname]
+ raw = oldnode[propname]
+ converter = _converters.get(rutyp.__class__, None)
+ if converter:
+ return converter(raw)
+ return raw
ndx = self.idcache.get(id, None)
+
if ndx is None:
ndx = view.find(id=id)
if ndx < 0:
raise IndexError, "%s has no node %s" % (self.classname, nodeid)
self.idcache[id] = ndx
- raw = getattr(view[ndx], propname)
+ try:
+ raw = getattr(view[ndx], propname)
+ except AttributeError:
+ raise KeyError, propname
rutyp = self.ruprops.get(propname, None)
+
if rutyp is None:
rutyp = self.privateprops[propname]
+
converter = _converters.get(rutyp.__class__, None)
if converter:
raw = converter(raw)
return raw
def set(self, nodeid, **propvalues):
+ '''Modify a property on an existing node of this class.
+ 'nodeid' must be the id of an existing node of this class or an
+ IndexError is raised.
+
+ Each key in 'propvalues' must be the name of a property of this
+ class or a KeyError is raised.
+
+ All values in 'propvalues' must be acceptable types for their
+ corresponding properties or a TypeError is raised.
+
+ If the value of the key property is set, it must not collide with
+ other key strings or a ValueError is raised.
+
+ If the value of a Link or Multilink property contains an invalid
+ node id, a ValueError is raised.
+ '''
isnew = 0
if propvalues.has_key('#ISNEW'):
isnew = 1
del propvalues['#ISNEW']
+ if not isnew:
+ self.fireAuditors('set', nodeid, propvalues)
if not propvalues:
- return
+ return propvalues
if propvalues.has_key('id'):
raise KeyError, '"id" is reserved'
if self.db.journaltag is None:
- raise DatabaseError, 'Database open read-only'
- view = self.getview(1)
+ raise hyperdb.DatabaseError, 'Database open read-only'
+ view = self.getview(READWRITE)
+
# node must exist & not be retired
id = int(nodeid)
ndx = view.find(id=id)
raise IndexError, "%s has no node %s" % (self.classname, nodeid)
oldnode = self.uncommitted.setdefault(id, {})
changes = {}
-
+
for key, value in propvalues.items():
# this will raise the KeyError if the property isn't valid
# ... we don't use getprops() here because we only care about
continue
# check to make sure we're not duplicating an existing key
- if key == self.keyname:
- iv = self.getindexview(1)
+ if key == self.key:
+ iv = self.getindexview(READWRITE)
ndx = iv.find(k=value)
if ndx == -1:
iv.append(k=value, i=row.id)
# do stuff based on the prop type
if isinstance(prop, hyperdb.Link):
link_class = prop.classname
+ # must be a string or None
+ if value is not None and not isinstance(value, type('')):
+ raise ValueError, 'property "%s" link value be a string'%(
+ key)
+ # Roundup sets to "unselected" by passing None
+ if value is None:
+ value = 0
# if it isn't a number, it's a key
- if type(value) != _STRINGTYPE:
- raise ValueError, 'link value must be String'
try:
int(value)
except ValueError:
raise IndexError, 'new property "%s": %s not a %s'%(
key, value, prop.classname)
- if not self.db.getclass(link_class).hasnode(value):
+ if (value is not None and
+ not self.db.getclass(link_class).hasnode(value)):
raise IndexError, '%s has no node %s'%(link_class, value)
setattr(row, key, int(value))
changes[key] = oldvalue
- if prop.do_journal:
+ if self.do_journal and prop.do_journal:
# register the unlink with the old linked node
if oldvalue:
- self.db.addjournal(link_class, value, _UNLINK, (self.classname, str(row.id), key))
+ self.db.addjournal(link_class, oldvalue, _UNLINK,
+ (self.classname, str(row.id), key))
# register the link with the newly linked node
if value:
- self.db.addjournal(link_class, value, _LINK, (self.classname, str(row.id), key))
+ self.db.addjournal(link_class, value, _LINK,
+ (self.classname, str(row.id), key))
elif isinstance(prop, hyperdb.Multilink):
- if type(value) != _LISTTYPE:
+ if value is not None and type(value) != _LISTTYPE:
raise TypeError, 'new property "%s" not a list of ids'%key
link_class = prop.classname
l = []
+ if value is None:
+ value = []
for entry in value:
if type(entry) != _STRINGTYPE:
raise ValueError, 'new property "%s" link value ' \
if id not in value:
rmvd.append(id)
# register the unlink with the old linked node
- if prop.do_journal:
- self.db.addjournal(link_class, id, _UNLINK, (self.classname, str(row.id), key))
+ if self.do_journal and prop.do_journal:
+ self.db.addjournal(link_class, id, _UNLINK,
+ (self.classname, str(row.id), key))
# handle additions
adds = []
link_class, id)
adds.append(id)
# register the link with the newly linked node
- if prop.do_journal:
- self.db.addjournal(link_class, id, _LINK, (self.classname, str(row.id), key))
-
+ if self.do_journal and prop.do_journal:
+ self.db.addjournal(link_class, id, _LINK,
+ (self.classname, str(row.id), key))
+
+ # perform the modifications on the actual property value
sv = getattr(row, key)
i = 0
while i < len(sv):
i += 1
for id in adds:
sv.append(fid=int(id))
- changes[key] = oldvalue
-
+
+ # figure the journal entry
+ l = []
+ if adds:
+ l.append(('+', adds))
+ if rmvd:
+ l.append(('-', rmvd))
+ if l:
+ changes[key] = tuple(l)
+ #changes[key] = oldvalue
+
+ if not rmvd and not adds:
+ del propvalues[key]
elif isinstance(prop, hyperdb.String):
if value is not None and type(value) != _STRINGTYPE:
raise TypeError, 'new property "%s" not a string'%key
+ if value is None:
+ value = ''
setattr(row, key, value)
changes[key] = oldvalue
if hasattr(prop, 'isfilename') and prop.isfilename:
propvalues[key] = os.path.basename(value)
if prop.indexme:
- self.db.indexer.add_text((self.classname, nodeid, key), value, 'text/plain')
+ self.db.indexer.add_text((self.classname, nodeid, key),
+ value, 'text/plain')
elif isinstance(prop, hyperdb.Password):
- if not isinstance(value, password.Password):
+ if value is not None and not isinstance(value, password.Password):
raise TypeError, 'new property "%s" not a Password'% key
+ if value is None:
+ value = ''
setattr(row, key, str(value))
changes[key] = str(oldvalue)
propvalues[key] = str(value)
- elif value is not None and isinstance(prop, hyperdb.Date):
- if not isinstance(value, date.Date):
+ elif isinstance(prop, hyperdb.Date):
+ if value is not None and not isinstance(value, date.Date):
raise TypeError, 'new property "%s" not a Date'% key
- setattr(row, key, int(calendar.timegm(value.get_tuple())))
+ if value is None:
+ setattr(row, key, 0)
+ else:
+ setattr(row, key, int(calendar.timegm(value.get_tuple())))
changes[key] = str(oldvalue)
propvalues[key] = str(value)
- elif value is not None and isinstance(prop, hyperdb.Interval):
- if not isinstance(value, date.Interval):
+ elif isinstance(prop, hyperdb.Interval):
+ if value is not None and not isinstance(value, date.Interval):
raise TypeError, 'new property "%s" not an Interval'% key
- setattr(row, key, str(value))
+ if value is None:
+ setattr(row, key, '')
+ else:
+ # kedder: we should store interval values serialized
+ setattr(row, key, value.serialise())
changes[key] = str(oldvalue)
propvalues[key] = str(value)
+
+ elif isinstance(prop, hyperdb.Number):
+ if value is None:
+ v = 0
+ else:
+ try:
+ v = int(value)
+ except ValueError:
+ raise TypeError, "%s (%s) is not numeric"%(key, repr(value))
+ if not BACKWARDS_COMPATIBLE:
+ if v >=0:
+ v = v + 1
+ setattr(row, key, v)
+ changes[key] = oldvalue
+ propvalues[key] = value
+
+ elif isinstance(prop, hyperdb.Boolean):
+ if value is None:
+ bv = 0
+ elif value not in (0,1):
+ raise TypeError, "%s (%s) is not boolean"%(key, repr(value))
+ else:
+ bv = value
+ if not BACKWARDS_COMPATIBLE:
+ bv += 1
+ setattr(row, key, bv)
+ changes[key] = oldvalue
+ propvalues[key] = value
oldnode[key] = oldvalue
# nothing to do?
if not propvalues:
- return
- if not row.activity:
+ return propvalues
+ if not propvalues.has_key('activity'):
row.activity = int(time.time())
if isnew:
if not row.creation:
row.creation = int(time.time())
if not row.creator:
- row.creator = self.db.curuserid
-
+ row.creator = int(self.db.getuid())
+
self.db.dirty = 1
- if isnew:
- self.db.addjournal(self.classname, nodeid, _CREATE, {})
- else:
- self.db.addjournal(self.classname, nodeid, _SET, changes)
+ if self.do_journal:
+ if isnew:
+ self.db.addjournal(self.classname, nodeid, _CREATE, {})
+ self.fireReactors('create', nodeid, None)
+ else:
+ self.db.addjournal(self.classname, nodeid, _SET, changes)
+ self.fireReactors('set', nodeid, oldnode)
+ return propvalues
+
def retire(self, nodeid):
- view = self.getview(1)
+ '''Retire a node.
+
+ The properties on the node remain available from the get() method,
+ and the node's id is never reused.
+
+ Retired nodes are not returned by the find(), list(), or lookup()
+ methods, and other nodes may reuse the values of their key properties.
+ '''
+ if self.db.journaltag is None:
+ raise hyperdb.DatabaseError, 'Database open read-only'
+ self.fireAuditors('retire', nodeid, None)
+ view = self.getview(READWRITE)
ndx = view.find(id=int(nodeid))
if ndx < 0:
raise KeyError, "nodeid %s not found" % nodeid
+
row = view[ndx]
oldvalues = self.uncommitted.setdefault(row.id, {})
oldval = oldvalues['_isdel'] = row._isdel
row._isdel = 1
- self.db.addjournal(self.classname, nodeid, _RETIRE, {})
- iv = self.getindexview(1)
- ndx = iv.find(k=getattr(row, self.keyname),i=row.id)
- if ndx > -1:
- iv.delete(ndx)
+
+ if self.do_journal:
+ self.db.addjournal(self.classname, nodeid, _RETIRE, {})
+ if self.key:
+ iv = self.getindexview(READWRITE)
+ ndx = iv.find(k=getattr(row, self.key))
+ # find is broken with multiple attribute lookups
+ # on ordered views
+ #ndx = iv.find(k=getattr(row, self.key),i=row.id)
+ if ndx > -1 and iv[ndx].i == row.id:
+ iv.delete(ndx)
+
+ self.db.dirty = 1
+ self.fireReactors('retire', nodeid, None)
+
+ def restore(self, nodeid):
+ '''Restore a retired node.
+
+ Make node available for all operations like it was before retirement.
+ '''
+ if self.db.journaltag is None:
+ raise hyperdb.DatabaseError, 'Database open read-only'
+
+ # check if key property was overridden
+ key = self.getkey()
+ keyvalue = self.get(nodeid, key)
+
+ try:
+ id = self.lookup(keyvalue)
+ except KeyError:
+ pass
+ else:
+ raise KeyError, "Key property (%s) of retired node clashes with \
+ existing one (%s)" % (key, keyvalue)
+ # Now we can safely restore node
+ self.fireAuditors('restore', nodeid, None)
+ view = self.getview(READWRITE)
+ ndx = view.find(id=int(nodeid))
+ if ndx < 0:
+ raise KeyError, "nodeid %s not found" % nodeid
+
+ row = view[ndx]
+ oldvalues = self.uncommitted.setdefault(row.id, {})
+ oldval = oldvalues['_isdel'] = row._isdel
+ row._isdel = 0
+
+ if self.do_journal:
+ self.db.addjournal(self.classname, nodeid, _RESTORE, {})
+ if self.key:
+ iv = self.getindexview(READWRITE)
+ ndx = iv.find(k=getattr(row, self.key),i=row.id)
+ if ndx > -1:
+ iv.delete(ndx)
self.db.dirty = 1
+ self.fireReactors('restore', nodeid, None)
+
+ def is_retired(self, nodeid):
+ '''Return true if the node is retired
+ '''
+ view = self.getview(READWRITE)
+ # node must exist & not be retired
+ id = int(nodeid)
+ ndx = view.find(id=id)
+ if ndx < 0:
+ raise IndexError, "%s has no node %s" % (self.classname, nodeid)
+ row = view[ndx]
+ return row._isdel
+
def history(self, nodeid):
- return self.db.gethistory(self.classname, nodeid)
+ '''Retrieve the journal of edits on a particular node.
+
+ 'nodeid' must be the id of an existing node of this class or an
+ IndexError is raised.
+
+ The returned list contains tuples of the form
+
+ (nodeid, date, tag, action, params)
+
+ 'date' is a Timestamp object specifying the time of the change and
+ 'tag' is the journaltag specified when the database was opened.
+ '''
+ if not self.do_journal:
+ raise ValueError, 'Journalling is disabled for this class'
+ return self.db.getjournal(self.classname, nodeid)
+
def setkey(self, propname):
- if self.keyname:
- if propname == self.keyname:
+ '''Select a String property of this class to be the key property.
+
+ 'propname' must be the name of a String property of this class or
+ None, or a TypeError is raised. The values of the key property on
+ all existing nodes must be unique or a ValueError is raised.
+ '''
+ if self.key:
+ if propname == self.key:
return
- raise ValueError, "%s already indexed on %s" % (self.classname, self.keyname)
- # first setkey for this run
- self.keyname = propname
- iv = self.db._db.view('_%s' % self.classname)
- if self.db.fastopen or iv.structure():
+ else:
+ # drop the old key table
+ tablename = "_%s.%s"%(self.classname, self.key)
+ self.db._db.getas(tablename)
+
+ #raise ValueError, "%s already indexed on %s"%(self.classname,
+ # self.key)
+
+ prop = self.properties.get(propname, None)
+ if prop is None:
+ prop = self.privateprops.get(propname, None)
+ if prop is None:
+ raise KeyError, "no property %s" % propname
+ if not isinstance(prop, hyperdb.String):
+ raise TypeError, "%s is not a String" % propname
+
+ # the way we index on properties is by creating a
+ # table named _%(classname)s.%(key)s, if this table
+ # exists then everything is okay. If this table
+ # doesn't exist, then generate a new table on the
+ # key value.
+
+ # first setkey for this run or key has been changed
+ self.key = propname
+ tablename = "_%s.%s"%(self.classname, self.key)
+
+ iv = self.db._db.view(tablename)
+ if self.db.fastopen and iv.structure():
return
- # very first setkey ever
- iv = self.db._db.getas('_%s[k:S,i:I]' % self.classname)
+
+ # very first setkey ever or the key has changed
+ self.db.dirty = 1
+ iv = self.db._db.getas('_%s[k:S,i:I]' % tablename)
iv = iv.ordered(1)
- #XXX
- print "setkey building index"
for row in self.getview():
iv.append(k=getattr(row, propname), i=row.id)
+ self.db.commit()
+
def getkey(self):
- return self.keyname
+ '''Return the name of the key property for this class or None.'''
+ return self.key
+
def lookup(self, keyvalue):
+ '''Locate a particular node by its key property and return its id.
+
+ If this class has no key property, a TypeError is raised. If the
+ keyvalue matches one of the values for the key property among
+ the nodes in this class, the matching node's id is returned;
+ otherwise a KeyError is raised.
+ '''
+ if not self.key:
+ raise TypeError, 'No key property set for class %s'%self.classname
+
if type(keyvalue) is not _STRINGTYPE:
- raise TypeError, "%r is not a string" % keyvalue
+ raise TypeError, '%r is not a string'%keyvalue
+
+ # XXX FIX ME -> this is a bit convoluted
+ # First we search the index view to get the id
+ # which is a quicker look up.
+ # Then we lookup the row with id=id
+ # if the _isdel property of the row is 0, return the
+ # string version of the id. (Why string version???)
+ #
+ # Otherwise, just lookup the non-indexed key
+ # in the non-index table and check the _isdel property
iv = self.getindexview()
if iv:
+ # look up the index view for the id,
+ # then instead of looking up the keyvalue, lookup the
+ # quicker id
ndx = iv.find(k=keyvalue)
if ndx > -1:
- return str(iv[ndx].i)
+ view = self.getview()
+ ndx = view.find(id=iv[ndx].i)
+ if ndx > -1:
+ row = view[ndx]
+ if not row._isdel:
+ return str(row.id)
else:
+ # perform the slower query
view = self.getview()
- ndx = view.find({self.keyname:keyvalue, '_isdel':0})
+ ndx = view.find({self.key:keyvalue})
if ndx > -1:
- return str(view[ndx].id)
+ row = view[ndx]
+ if not row._isdel:
+ return str(row.id)
+
raise KeyError, keyvalue
+
+ def destroy(self, id):
+ '''Destroy a node.
+
+ WARNING: this method should never be used except in extremely rare
+ situations where there could never be links to the node being
+ deleted
+
+ WARNING: use retire() instead
+
+ WARNING: the properties of this node will not be available ever again
+
+ WARNING: really, use retire() instead
+
+ Well, I think that's enough warnings. This method exists mostly to
+ support the session storage of the cgi interface.
+
+ The node is completely removed from the hyperdb, including all journal
+ entries. It will no longer be available, and will generally break code
+ if there are any references to the node.
+ '''
+ view = self.getview(READWRITE)
+ ndx = view.find(id=int(id))
+ if ndx > -1:
+ if self.key:
+ keyvalue = getattr(view[ndx], self.key)
+ iv = self.getindexview(READWRITE)
+ if iv:
+ ivndx = iv.find(k=keyvalue)
+ if ivndx > -1:
+ iv.delete(ivndx)
+ view.delete(ndx)
+ self.db.destroyjournal(self.classname, id)
+ self.db.dirty = 1
+
def find(self, **propspec):
- """Get the ids of nodes in this class which link to the given nodes.
+ '''Get the ids of nodes in this class which link to the given nodes.
+
+ 'propspec'
+ consists of keyword args propname={nodeid:1,}
+ 'propname'
+ must be the name of a property in this class, or a
+ KeyError is raised. That property must be a Link or
+ Multilink property, or a TypeError is raised.
- 'propspec' consists of keyword args propname={nodeid:1,}
- 'propname' must be the name of a property in this class, or a
- KeyError is raised. That property must be a Link or Multilink
- property, or a TypeError is raised.
Any node in this class whose propname property links to any of the
nodeids will be returned. Used by the full text indexing, which knows
- that "foo" occurs in msg1, msg3 and file7; so we have hits on these issues:
+ that "foo" occurs in msg1, msg3 and file7; so we have hits on these
+ issues::
+
db.issue.find(messages={'1':1,'3':1}, files={'7':1})
- """
+ '''
propspec = propspec.items()
for propname, nodeid in propspec:
# check the prop is OK
prop = self.ruprops[propname]
- if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink):
+ if (not isinstance(prop, hyperdb.Link) and
+ not isinstance(prop, hyperdb.Multilink)):
raise TypeError, "'%s' not a Link/Multilink property"%propname
vws = []
for propname, ids in propspec:
if type(ids) is _STRINGTYPE:
- ids = {ids:1}
+ ids = {int(ids):1}
+ elif ids is None:
+ ids = {0:1}
+ else:
+ d = {}
+ for id in ids.keys():
+ if id is None:
+ d[0] = 1
+ else:
+ d[int(id)] = 1
+ ids = d
prop = self.ruprops[propname]
view = self.getview()
if isinstance(prop, hyperdb.Multilink):
- view = view.flatten(getattr(view, propname))
def ff(row, nm=propname, ids=ids):
- return ids.has_key(str(row.fid))
+ if not row._isdel:
+ sv = getattr(row, nm)
+ for sr in sv:
+ if ids.has_key(sr.fid):
+ return 1
+ return 0
else:
def ff(row, nm=propname, ids=ids):
- return ids.has_key(str(getattr(row, nm)))
+ return not row._isdel and ids.has_key(getattr(row, nm))
ndxview = view.filter(ff)
vws.append(ndxview.unique())
+
+ # handle the empty match case
+ if not vws:
+ return []
+
ndxview = vws[0]
for v in vws[1:]:
ndxview = ndxview.union(v)
- view = view.remapwith(ndxview)
+ view = self.getview().remapwith(ndxview)
rslt = []
for row in view:
rslt.append(str(row.id))
def list(self):
+ ''' Return a list of the ids of the active nodes in this class.
+ '''
l = []
for row in self.getview().select(_isdel=0):
l.append(str(row.id))
return l
+
+ def getnodeids(self):
+ ''' Retrieve all the ids of the nodes for a particular Class.
+
+ Note: no 'retired' filtering flag is implemented here - the
+ ids of all nodes, retired or not, are returned.
+ '''
+ l = []
+ for row in self.getview():
+ l.append(str(row.id))
+ return l
+
def count(self):
return len(self.getview())
+
def getprops(self, protected=1):
# protected is not in ping's spec
allprops = self.ruprops.copy()
if protected and self.privateprops is not None:
allprops.update(self.privateprops)
return allprops
+
def addprop(self, **properties):
for key in properties.keys():
if self.ruprops.has_key(key):
- raise ValueError, "%s is already a property of %s" % (key, self.classname)
+ raise ValueError, "%s is already a property of %s"%(key,
+ self.classname)
self.ruprops.update(properties)
+ # Class structure has changed
+ self.db.fastopen = 0
view = self.__getview()
+ self.db.commit()
# ---- end of ping's spec
- def filter(self, search_matches, filterspec, sort, group):
+
+ def filter(self, search_matches, filterspec, sort=(None,None),
+ group=(None,None)):
+ '''Return a list of the ids of the active nodes in this class that
+ match the 'filter' spec, sorted by the group spec and then the
+ sort spec
+
+ "filterspec" is {propname: value(s)}
+
+ "sort" and "group" are (dir, prop) where dir is '+', '-' or None
+ and prop is a prop name or None
+
+ "search_matches" is {nodeid: marker}
+
+ The filter must match all properties specified - but if the
+ property value to match is a list, any one of the values in the
+ list may match for that property to match.
+ '''
# search_matches is None or a set (dict of {nodeid: {propname:[nodeid,...]}})
# filterspec is a dict {propname:value}
- # sort and group are lists of propnames
-
+ # sort and group are (dir, prop) where dir is '+', '-' or None
+ # and prop is a prop name or None
+
+ timezone = self.db.getUserTimezone()
+
where = {'_isdel':0}
+ wherehigh = {}
mlcriteria = {}
regexes = {}
orcriteria = {}
if prop is None:
prop = self.privateprops[propname]
if isinstance(prop, hyperdb.Multilink):
- if type(value) is not _LISTTYPE:
+ if value in ('-1', ['-1']):
+ value = []
+ elif type(value) is not _LISTTYPE:
value = [value]
# transform keys to ids
u = []
else:
orcriteria[propname] = u
elif isinstance(prop, hyperdb.String):
- # simple glob searching
- v = re.sub(r'([\|\{\}\\\.\+\[\]\(\)])', r'\\\1', value)
- v = v.replace('?', '.')
- v = v.replace('*', '.*?')
- regexes[propname] = re.compile(v, re.I)
+ if type(value) is not type([]):
+ value = [value]
+ m = []
+ for v in value:
+ # simple glob searching
+ v = re.sub(r'([\|\{\}\\\.\+\[\]\(\)])', r'\\\1', v)
+ v = v.replace('?', '.')
+ v = v.replace('*', '.*?')
+ m.append(v)
+ regexes[propname] = re.compile('(%s)'%('|'.join(m)), re.I)
elif propname == 'id':
where[propname] = int(value)
+ elif isinstance(prop, hyperdb.Boolean):
+ if type(value) is _STRINGTYPE:
+ bv = value.lower() in ('yes', 'true', 'on', '1')
+ else:
+ bv = value
+ where[propname] = bv
+ elif isinstance(prop, hyperdb.Date):
+ try:
+ # Try to filter on range of dates
+ date_rng = Range(value, date.Date, offset=timezone)
+ if date_rng.from_value:
+ t = date_rng.from_value.get_tuple()
+ where[propname] = int(calendar.timegm(t))
+ else:
+ # use minimum possible value to exclude items without
+ # 'prop' property
+ where[propname] = 0
+ if date_rng.to_value:
+ t = date_rng.to_value.get_tuple()
+ wherehigh[propname] = int(calendar.timegm(t))
+ else:
+ wherehigh[propname] = None
+ except ValueError:
+ # If range creation fails - ignore that search parameter
+ pass
+ elif isinstance(prop, hyperdb.Interval):
+ try:
+ # Try to filter on range of intervals
+ date_rng = Range(value, date.Interval)
+ if date_rng.from_value:
+ #t = date_rng.from_value.get_tuple()
+ where[propname] = date_rng.from_value.serialise()
+ else:
+ # use minimum possible value to exclude items without
+ # 'prop' property
+ where[propname] = '-99999999999999'
+ if date_rng.to_value:
+ #t = date_rng.to_value.get_tuple()
+ wherehigh[propname] = date_rng.to_value.serialise()
+ else:
+ wherehigh[propname] = None
+ except ValueError:
+ # If range creation fails - ignore that search parameter
+ pass
+ elif isinstance(prop, hyperdb.Number):
+ where[propname] = int(value)
else:
where[propname] = str(value)
v = self.getview()
#print "filter start at %s" % time.time()
if where:
- v = v.select(where)
+ where_higherbound = where.copy()
+ where_higherbound.update(wherehigh)
+ v = v.select(where, where_higherbound)
#print "filter where at %s" % time.time()
-
+
if mlcriteria:
- # multilink - if any of the nodeids required by the
- # filterspec aren't in this node's property, then skip
- # it
+ # multilink - if any of the nodeids required by the
+ # filterspec aren't in this node's property, then skip it
def ff(row, ml=mlcriteria):
for propname, values in ml.items():
sv = getattr(row, propname)
+ if not values and sv:
+ return 0
for id in values:
if sv.find(fid=id) == -1:
return 0
if regexes:
def ff(row, r=regexes):
for propname, regex in r.items():
- val = getattr(row, propname)
+ val = str(getattr(row, propname))
if not regex.search(val):
return 0
return 1
if sort or group:
sortspec = []
rev = []
- for propname in group + sort:
+ for dir, propname in group, sort:
+ if propname is None: continue
isreversed = 0
- if propname[0] == '-':
- propname = propname[1:]
+ if dir == '-':
isreversed = 1
try:
prop = getattr(v, propname)
except AttributeError:
- # I can't sort on 'activity', cause it's psuedo!!
+ print "MK has no property %s" % propname
continue
+ propclass = self.ruprops.get(propname, None)
+ if propclass is None:
+ propclass = self.privateprops.get(propname, None)
+ if propclass is None:
+ print "Schema has no property %s" % propname
+ continue
+ if isinstance(propclass, hyperdb.Link):
+ linkclass = self.db.getclass(propclass.classname)
+ lv = linkclass.getview()
+ lv = lv.rename('id', propname)
+ v = v.join(lv, prop, 1)
+ if linkclass.getprops().has_key('order'):
+ propname = 'order'
+ else:
+ propname = linkclass.labelprop()
+ prop = getattr(v, propname)
if isreversed:
rev.append(prop)
sortspec.append(prop)
- v = v.sortrev(sortspec, rev)[:] #XXX Aaaabh
+ v = v.sortrev(sortspec, rev)[:] #XXX Metakit bug
#print "filter sort at %s" % time.time()
rslt = []
return rslt
def hasnode(self, nodeid):
+ '''Determine if the given nodeid actually exists
+ '''
return int(nodeid) < self.maxid
def labelprop(self, default_to_id=0):
- ''' Return the property name for a label for the given node.
+ '''Return the property name for a label for the given node.
This method attempts to generate a consistent label for the node.
It tries the following in order:
- 1. key property
- 2. "name" property
- 3. "title" property
- 4. first property from the sorted property name list
+
+ 1. key property
+ 2. "name" property
+ 3. "title" property
+ 4. first property from the sorted property name list
'''
k = self.getkey()
if k:
props = props.keys()
props.sort()
return props[0]
+
def stringFind(self, **requirements):
- """Locate a particular node by matching a set of its String
+ '''Locate a particular node by matching a set of its String
properties in a caseless search.
If the property is not a String property, a TypeError is raised.
The return is a list of the id of all nodes that match.
- """
+ '''
for propname in requirements.keys():
prop = self.properties[propname]
if isinstance(not prop, hyperdb.String):
return l
def addjournal(self, nodeid, action, params):
+ '''Add a journal to the given nodeid,
+ 'action' may be:
+
+ 'create' or 'set' -- 'params' is a dictionary of property values
+ 'link' or 'unlink' -- 'params' is (classname, nodeid, propname)
+ 'retire' -- 'params' is None
+ '''
self.db.addjournal(self.classname, nodeid, action, params)
def index(self, nodeid):
self.db.indexer.add_text((self.classname, nodeid, prop),
str(self.get(nodeid, prop)))
+ def export_list(self, propnames, nodeid):
+ ''' Export a node - generate a list of CSV-able data in the order
+ specified by propnames for the given node.
+ '''
+ properties = self.getprops()
+ l = []
+ for prop in propnames:
+ proptype = properties[prop]
+ value = self.get(nodeid, prop)
+ # "marshal" data where needed
+ if value is None:
+ pass
+ elif isinstance(proptype, hyperdb.Date):
+ value = value.get_tuple()
+ elif isinstance(proptype, hyperdb.Interval):
+ value = value.get_tuple()
+ elif isinstance(proptype, hyperdb.Password):
+ value = str(value)
+ l.append(repr(value))
+
+ # append retired flag
+ l.append(repr(self.is_retired(nodeid)))
+
+ return l
+
+ def import_list(self, propnames, proplist):
+ ''' Import a node - all information including "id" is present and
+ should not be sanity checked. Triggers are not triggered. The
+ journal should be initialised using the "creator" and "creation"
+ information.
+
+ Return the nodeid of the node imported.
+ '''
+ if self.db.journaltag is None:
+ raise hyperdb.DatabaseError, 'Database open read-only'
+ properties = self.getprops()
+
+ d = {}
+ view = self.getview(READWRITE)
+ for i in range(len(propnames)):
+ value = eval(proplist[i])
+ if not value:
+ continue
+
+ propname = propnames[i]
+ if propname == 'id':
+ newid = value = int(value)
+ elif propname == 'is retired':
+ # is the item retired?
+ if int(value):
+ d['_isdel'] = 1
+ continue
+ elif value is None:
+ d[propname] = None
+ continue
+
+ prop = properties[propname]
+ if isinstance(prop, hyperdb.Date):
+ value = int(calendar.timegm(value))
+ elif isinstance(prop, hyperdb.Interval):
+ value = date.Interval(value).serialise()
+ elif isinstance(prop, hyperdb.Number):
+ value = int(value)
+ elif isinstance(prop, hyperdb.Boolean):
+ value = int(value)
+ elif isinstance(prop, hyperdb.Link) and value:
+ value = int(value)
+ elif isinstance(prop, hyperdb.Multilink):
+ # we handle multilinks separately
+ continue
+ d[propname] = value
+
+ # possibly make a new node
+ if not d.has_key('id'):
+ d['id'] = newid = self.maxid
+ self.maxid += 1
+
+ # save off the node
+ view.append(d)
+
+ # fix up multilinks
+ ndx = view.find(id=newid)
+ row = view[ndx]
+ for i in range(len(propnames)):
+ value = eval(proplist[i])
+ propname = propnames[i]
+ if propname == 'is retired':
+ continue
+ prop = properties[propname]
+ if not isinstance(prop, hyperdb.Multilink):
+ continue
+ sv = getattr(row, propname)
+ for entry in value:
+ sv.append((int(entry),))
+
+ self.db.dirty = 1
+ creator = d.get('creator', 0)
+ creation = d.get('creation', 0)
+ self.db.addjournal(self.classname, str(newid), _CREATE, {}, creator,
+ creation)
+ return newid
+
# --- used by Database
def _commit(self):
- """ called post commit of the DB.
- interested subclasses may override """
+ ''' called post commit of the DB.
+ interested subclasses may override '''
self.uncommitted = {}
self.rbactions = []
self.idcache = {}
def _rollback(self):
- """ called pre rollback of the DB.
- interested subclasses may override """
+ ''' called pre rollback of the DB.
+ interested subclasses may override '''
for action in self.rbactions:
action()
self.rbactions = []
self.uncommitted = {}
self.idcache = {}
def _clear(self):
- view = self.getview(1)
+ view = self.getview(READWRITE)
if len(view):
view[:] = []
self.db.dirty = 1
- iv = self.getindexview(1)
+ iv = self.getindexview(READWRITE)
if iv:
iv[:] = []
def rollbackaction(self, action):
- """ call this to register a callback called on rollback
- callback is removed on end of transaction """
+ ''' call this to register a callback called on rollback
+ callback is removed on end of transaction '''
self.rbactions.append(action)
# --- internal
def __getview(self):
+ ''' Find the interface for a specific Class in the hyperdb.
+
+ This method checks to see whether the schema has changed and
+ re-works the underlying metakit structure if it has.
+ '''
db = self.db._db
view = db.view(self.classname)
- if self.db.fastopen:
+ mkprops = view.structure()
+
+ # if we have structure in the database, and the structure hasn't
+ # changed
+ # note on view.ordered ->
+ # return a metakit view ordered on the id column
+ # id is always the first column. This speeds up
+ # look-ups on the id column.
+
+ if mkprops and self.db.fastopen:
return view.ordered(1)
+
# is the definition the same?
- mkprops = view.structure()
for nm, rutyp in self.ruprops.items():
for mkprop in mkprops:
if mkprop.name == nm:
else:
mkprop = None
if mkprop is None:
- #print "%s missing prop %s (%s)" % (self.classname, nm, rutyp.__class__.__name__)
break
if _typmap[rutyp.__class__] != mkprop.type:
- #print "%s - prop %s (%s) has wrong mktyp (%s)" % (self.classname, nm, rutyp.__class__.__name__, mkprop.type)
break
else:
+
return view.ordered(1)
- # need to create or restructure the mk view
- # id comes first, so MK will order it for us
+ # The schema has changed. We need to create or restructure the mk view
+ # id comes first, so we can use view.ordered(1) so that
+ # MK will order it for us to allow binary-search quick lookups on
+ # the id column
self.db.dirty = 1
s = ["%s[id:I" % self.classname]
+
+ # these columns will always be added, we can't trample them :)
+ _columns = {"id":"I", "_isdel":"I", "activity":"I", "creation":"I", "creator":"I"}
+
for nm, rutyp in self.ruprops.items():
- mktyp = _typmap[rutyp.__class__]
+ mktyp = _typmap[rutyp.__class__].upper()
+ if nm in _columns and _columns[nm] != mktyp:
+ # oops, two columns with the same name and different properties
+ raise MKBackendError("column %s for table %sis defined with multiple types"%(nm, self.classname))
+ _columns[nm] = mktyp
s.append('%s:%s' % (nm, mktyp))
if mktyp == 'V':
s[-1] += ('[fid:I]')
+
+ # XXX FIX ME -> in some tests, creation:I becomes creation:S -
+ # is this okay? Does this need to be supported?
s.append('_isdel:I,activity:I,creation:I,creator:I]')
- v = db.getas(','.join(s))
- return v.ordered(1)
+ view = self.db._db.getas(','.join(s))
+ self.db.commit()
+ return view.ordered(1)
def getview(self, RW=0):
- if RW and self.db.isReadOnly():
- self.db.getWriteAccess()
+ # XXX FIX ME -> The RW flag doesn't do anything.
return self.db._db.view(self.classname).ordered(1)
def getindexview(self, RW=0):
- if RW and self.db.isReadOnly():
- self.db.getWriteAccess()
- return self.db._db.view("_%s" % self.classname).ordered(1)
-
+ # XXX FIX ME -> The RW flag doesn't do anything.
+ tablename = "_%s.%s"%(self.classname, self.key)
+ return self.db._db.view("_%s" % tablename).ordered(1)
+
def _fetchML(sv):
l = []
for row in sv:
return l
def _fetchPW(s):
+ ''' Convert to a password.Password unless the password is '' which is
+ our sentinel for "unset".
+ '''
+ if s == '':
+ return None
p = password.Password()
p.unpack(s)
return p
def _fetchLink(n):
+ ''' Return None if the link is 0 - otherwise strify it.
+ '''
return n and str(n) or None
def _fetchDate(n):
+ ''' Convert the timestamp to a date.Date instance - unless it's 0 which
+ is our sentinel for "unset".
+ '''
+ if n == 0:
+ return None
return date.Date(time.gmtime(n))
+def _fetchInterval(n):
+ ''' Convert to a date.Interval unless the interval is '' which is our
+ sentinel for "unset".
+ '''
+ if n == '':
+ return None
+ return date.Interval(n)
+
+# Converters for boolean and numbers to properly
+# return None values.
+# These are in conjunction with the setters above
+# look for hyperdb.Boolean and hyperdb.Number
+if BACKWARDS_COMPATIBLE:
+ def getBoolean(bool): return bool
+ def getNumber(number): return number
+else:
+ def getBoolean(bool):
+ if not bool: res = None
+ else: res = bool - 1
+ return res
+
+ def getNumber(number):
+ if number == 0: res = None
+ elif number < 0: res = number
+ else: res = number - 1
+ return res
+
_converters = {
hyperdb.Date : _fetchDate,
hyperdb.Link : _fetchLink,
hyperdb.Multilink : _fetchML,
- hyperdb.Interval : date.Interval,
+ hyperdb.Interval : _fetchInterval,
hyperdb.Password : _fetchPW,
+ hyperdb.Boolean : getBoolean,
+ hyperdb.Number : getNumber,
+ hyperdb.String : lambda s: s and str(s) or None,
}
class FileName(hyperdb.String):
hyperdb.Multilink : 'V',
hyperdb.Interval : 'S',
hyperdb.Password : 'S',
+ hyperdb.Boolean : 'I',
+ hyperdb.Number : 'I',
}
-class FileClass(Class):
- ' like Class but with a content property '
+class FileClass(Class, hyperdb.FileClass):
+ ''' like Class but with a content property
+ '''
default_mime_type = 'text/plain'
def __init__(self, db, classname, **properties):
properties['content'] = FileName()
if not properties.has_key('type'):
properties['type'] = hyperdb.String()
Class.__init__(self, db, classname, **properties)
+
def get(self, nodeid, propname, default=_marker, cache=1):
- x = Class.get(self, nodeid, propname, default, cache)
+ x = Class.get(self, nodeid, propname, default)
+ poss_msg = 'Possibly an access right configuration problem.'
if propname == 'content':
if x.startswith('file:'):
fnm = x[5:]
try:
- x = open(fnm, 'rb').read()
- except Exception, e:
- x = repr(e)
+ f = open(fnm, 'rb')
+ except IOError, (strerror):
+ # XXX by catching this we do not see an error in the log.
+ return 'ERROR reading file: %s%s\n%s\n%s'%(
+ self.classname, nodeid, poss_msg, strerror)
+ x = f.read()
+ f.close()
return x
+
def create(self, **propvalues):
+ if not propvalues:
+ raise ValueError, "Need something to create!"
+ self.fireAuditors('create', None, propvalues)
content = propvalues['content']
del propvalues['content']
- newid = Class.create(self, **propvalues)
+ newid = Class.create_inner(self, **propvalues)
if not content:
return newid
- if content.startswith('/tracker/download.php?'):
- self.set(newid, content='http://sourceforge.net'+content)
- return newid
nm = bnm = '%s%s' % (self.classname, newid)
sd = str(int(int(newid) / 1000))
d = os.path.join(self.db.config.DATABASE, 'files', self.classname, sd)
open(nm, 'wb').write(content)
self.set(newid, content = 'file:'+nm)
mimetype = propvalues.get('type', self.default_mime_type)
- self.db.indexer.add_text((self.classname, newid, 'content'), content, mimetype)
+ self.db.indexer.add_text((self.classname, newid, 'content'), content,
+ mimetype)
def undo(fnm=nm, action1=os.remove, indexer=self.db.indexer):
- remove(fnm)
+ action1(fnm)
self.rollbackaction(undo)
return newid
+
def index(self, nodeid):
Class.index(self, nodeid)
mimetype = self.get(nodeid, 'type')
self.db.indexer.add_text((self.classname, nodeid, 'content'),
self.get(nodeid, 'content'), mimetype)
-# Yuck - c&p to avoid getting hyperdb.Class
-class IssueClass(Class):
-
- # Overridden methods:
-
- def __init__(self, db, classname, **properties):
- """The newly-created class automatically includes the "messages",
+class IssueClass(Class, roundupdb.IssueClass):
+ ''' The newly-created class automatically includes the "messages",
"files", "nosy", and "superseder" properties. If the 'properties'
dictionary attempts to specify any of these properties or a
- "creation" or "activity" property, a ValueError is raised."""
+ "creation" or "activity" property, a ValueError is raised.
+ '''
+ def __init__(self, db, classname, **properties):
if not properties.has_key('title'):
properties['title'] = hyperdb.String(indexme='yes')
if not properties.has_key('messages'):
if not properties.has_key('files'):
properties['files'] = hyperdb.Multilink("file")
if not properties.has_key('nosy'):
- properties['nosy'] = hyperdb.Multilink("user")
+ # note: journalling is turned off as it really just wastes
+ # space. this behaviour may be overridden in an instance
+ properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
if not properties.has_key('superseder'):
properties['superseder'] = hyperdb.Multilink(classname)
Class.__init__(self, db, classname, **properties)
+
+CURVERSION = 2
+
+class Indexer(indexer.Indexer):
+ disallows = {'THE':1, 'THIS':1, 'ZZZ':1, 'THAT':1, 'WITH':1}
+ def __init__(self, path, datadb):
+ self.path = os.path.join(path, 'index.mk4')
+ self.db = metakit.storage(self.path, 1)
+ self.datadb = datadb
+ self.reindex = 0
+ v = self.db.view('version')
+ if not v.structure():
+ v = self.db.getas('version[vers:I]')
+ self.db.commit()
+ v.append(vers=CURVERSION)
+ self.reindex = 1
+ elif v[0].vers != CURVERSION:
+ v[0].vers = CURVERSION
+ self.reindex = 1
+ if self.reindex:
+ self.db.getas('ids[tblid:I,nodeid:I,propid:I,ignore:I]')
+ self.db.getas('index[word:S,hits[pos:I]]')
+ self.db.commit()
+ self.reindex = 1
+ self.changed = 0
+ self.propcache = {}
- # New methods:
-
- def addmessage(self, nodeid, summary, text):
- """Add a message to an issue's mail spool.
-
- A new "msg" node is constructed using the current date, the user that
- owns the database connection as the author, and the specified summary
- text.
-
- The "files" and "recipients" fields are left empty.
-
- The given text is saved as the body of the message and the node is
- appended to the "messages" field of the specified issue.
- """
-
- def nosymessage(self, nodeid, msgid, oldvalues):
- """Send a message to the members of an issue's nosy list.
-
- The message is sent only to users on the nosy list who are not
- already on the "recipients" list for the message.
+ def close(self):
+ '''close the indexing database'''
+ del self.db
+ self.db = None
+
+ def force_reindex(self):
+ '''Force a reindexing of the database. This essentially
+ empties the tables ids and index and sets a flag so
+ that the databases are reindexed'''
+ v = self.db.view('ids')
+ v[:] = []
+ v = self.db.view('index')
+ v[:] = []
+ self.db.commit()
+ self.reindex = 1
+
+ def should_reindex(self):
+ '''returns True if the indexes need to be rebuilt'''
+ return self.reindex
+
+ def _getprops(self, classname):
+ props = self.propcache.get(classname, None)
+ if props is None:
+ props = self.datadb.view(classname).structure()
+ props = [prop.name for prop in props]
+ self.propcache[classname] = props
+ return props
+
+ def _getpropid(self, classname, propname):
+ return self._getprops(classname).index(propname)
+
+ def _getpropname(self, classname, propid):
+ return self._getprops(classname)[propid]
+
+ def add_text(self, identifier, text, mime_type='text/plain'):
+ if mime_type != 'text/plain':
+ return
+ classname, nodeid, property = identifier
+ tbls = self.datadb.view('tables')
+ tblid = tbls.find(name=classname)
+ if tblid < 0:
+ raise KeyError, "unknown class %r"%classname
+ nodeid = int(nodeid)
+ propid = self._getpropid(classname, property)
+ ids = self.db.view('ids')
+ oldpos = ids.find(tblid=tblid,nodeid=nodeid,propid=propid,ignore=0)
+ if oldpos > -1:
+ ids[oldpos].ignore = 1
+ self.changed = 1
+ pos = ids.append(tblid=tblid,nodeid=nodeid,propid=propid)
- These users are then added to the message's "recipients" list.
- """
- users = self.db.user
- messages = self.db.msg
-
- # figure the recipient ids
- sendto = []
- r = {}
- recipients = messages.get(msgid, 'recipients')
- for recipid in messages.get(msgid, 'recipients'):
- r[recipid] = 1
-
- # figure the author's id, and indicate they've received the message
- authid = messages.get(msgid, 'author')
-
- # possibly send the message to the author, as long as they aren't
- # anonymous
- if (self.db.config.MESSAGES_TO_AUTHOR == 'yes' and
- users.get(authid, 'username') != 'anonymous'):
- sendto.append(authid)
- r[authid] = 1
-
- # now figure the nosy people who weren't recipients
- nosy = self.get(nodeid, 'nosy')
- for nosyid in nosy:
- # Don't send nosy mail to the anonymous user (that user
- # shouldn't appear in the nosy list, but just in case they
- # do...)
- if users.get(nosyid, 'username') == 'anonymous':
+ wordlist = re.findall(r'\b\w{2,25}\b', text.upper())
+ words = {}
+ for word in wordlist:
+ if not self.disallows.has_key(word):
+ words[word] = 1
+ words = words.keys()
+
+ index = self.db.view('index').ordered(1)
+ for word in words:
+ ndx = index.find(word=word)
+ if ndx < 0:
+ index.append(word=word)
+ ndx = index.find(word=word)
+ index[ndx].hits.append(pos=pos)
+ self.changed = 1
+
+ def find(self, wordlist):
+ '''look up all the words in the wordlist.
+ If any word is missing from the index an empty dictionary
+ is returned. Words shorter than 3 or longer than 25
+ characters are skipped; hits for the remaining words are
+ intersected.
+ '''
+ hits = None
+ index = self.db.view('index').ordered(1)
+ for word in wordlist:
+ word = word.upper()
+ if not 2 < len(word) < 26:
continue
- # make sure they haven't seen the message already
- if not r.has_key(nosyid):
- # send it to them
- sendto.append(nosyid)
- recipients.append(nosyid)
-
- # generate a change note
- if oldvalues:
- note = self.generateChangeNote(nodeid, oldvalues)
- else:
- note = self.generateCreateNote(nodeid)
-
- # we have new recipients
- if sendto:
- # map userids to addresses
- sendto = [users.get(i, 'address') for i in sendto]
-
- # update the message's recipients list
- messages.set(msgid, recipients=recipients)
-
- # send the message
- self.send_message(nodeid, msgid, note, sendto)
-
- # XXX backwards compatibility - don't remove
- sendmessage = nosymessage
-
- def send_message(self, nodeid, msgid, note, sendto):
- '''Actually send the nominated message from this node to the sendto
- recipients, with the note appended.
- '''
- users = self.db.user
- messages = self.db.msg
- files = self.db.file
-
- # determine the messageid and inreplyto of the message
- inreplyto = messages.get(msgid, 'inreplyto')
- messageid = messages.get(msgid, 'messageid')
-
- # make up a messageid if there isn't one (web edit)
- if not messageid:
- # this is an old message that didn't get a messageid, so
- # create one
- messageid = "<%s.%s.%s%s@%s>"%(time.time(), random.random(),
- self.classname, nodeid, self.db.config.MAIL_DOMAIN)
- messages.set(msgid, messageid=messageid)
-
- # send an email to the people who missed out
- cn = self.classname
- title = self.get(nodeid, 'title') or '%s message copy'%cn
- # figure author information
- authid = messages.get(msgid, 'author')
- authname = users.get(authid, 'realname')
- if not authname:
- authname = users.get(authid, 'username')
- authaddr = users.get(authid, 'address')
- if authaddr:
- authaddr = ' <%s>'%authaddr
- else:
- authaddr = ''
+ ndx = index.find(word=word)
+ if ndx < 0:
+ return {}
+ if hits is None:
+ hits = index[ndx].hits
+ else:
+ hits = hits.intersect(index[ndx].hits)
+ if len(hits) == 0:
+ return {}
+ if hits is None:
+ return {}
+ rslt = {}
+ ids = self.db.view('ids').remapwith(hits)
+ tbls = self.datadb.view('tables')
+ for i in range(len(ids)):
+ hit = ids[i]
+ if not hit.ignore:
+ classname = tbls[hit.tblid].name
+ nodeid = str(hit.nodeid)
+ property = self._getpropname(classname, hit.propid)
+ rslt[i] = (classname, nodeid, property)
+ return rslt
- # make the message body
- m = ['']
+ def save_index(self):
+ if self.changed:
+ self.db.commit()
+ self.changed = 0
- # put in roundup's signature
- if self.db.config.EMAIL_SIGNATURE_POSITION == 'top':
- m.append(self.email_signature(nodeid, msgid))
+ def rollback(self):
+ if self.changed:
+ self.db.rollback()
+ self.db = metakit.storage(self.path, 1)
+ self.changed = 0
- # add author information
- if len(self.get(nodeid,'messages')) == 1:
- m.append("New submission from %s%s:"%(authname, authaddr))
- else:
- m.append("%s%s added the comment:"%(authname, authaddr))
- m.append('')
-
- # add the content
- m.append(messages.get(msgid, 'content'))
-
- # add the change note
- if note:
- m.append(note)
-
- # put in roundup's signature
- if self.db.config.EMAIL_SIGNATURE_POSITION == 'bottom':
- m.append(self.email_signature(nodeid, msgid))
-
- # encode the content as quoted-printable
- content = cStringIO.StringIO('\n'.join(m))
- content_encoded = cStringIO.StringIO()
- quopri.encode(content, content_encoded, 0)
- content_encoded = content_encoded.getvalue()
-
- # get the files for this message
- message_files = messages.get(msgid, 'files')
-
- # make sure the To line is always the same (for testing mostly)
- sendto.sort()
-
- # create the message
- message = cStringIO.StringIO()
- writer = MimeWriter.MimeWriter(message)
- writer.addheader('Subject', '[%s%s] %s'%(cn, nodeid, title))
- writer.addheader('To', ', '.join(sendto))
- writer.addheader('From', '%s <%s>'%(authname,
- self.db.config.ISSUE_TRACKER_EMAIL))
- writer.addheader('Reply-To', '%s <%s>'%(self.db.config.INSTANCE_NAME,
- self.db.config.ISSUE_TRACKER_EMAIL))
- writer.addheader('MIME-Version', '1.0')
- if messageid:
- writer.addheader('Message-Id', messageid)
- if inreplyto:
- writer.addheader('In-Reply-To', inreplyto)
-
- # add a uniquely Roundup header to help filtering
- writer.addheader('X-Roundup-Name', self.db.config.INSTANCE_NAME)
-
- # attach files
- if message_files:
- part = writer.startmultipartbody('mixed')
- part = writer.nextpart()
- part.addheader('Content-Transfer-Encoding', 'quoted-printable')
- body = part.startbody('text/plain')
- body.write(content_encoded)
- for fileid in message_files:
- name = files.get(fileid, 'name')
- mime_type = files.get(fileid, 'type')
- content = files.get(fileid, 'content')
- part = writer.nextpart()
- if mime_type == 'text/plain':
- part.addheader('Content-Disposition',
- 'attachment;\n filename="%s"'%name)
- part.addheader('Content-Transfer-Encoding', '7bit')
- body = part.startbody('text/plain')
- body.write(content)
- else:
- # some other type, so encode it
- if not mime_type:
- # this should have been done when the file was saved
- mime_type = mimetypes.guess_type(name)[0]
- if mime_type is None:
- mime_type = 'application/octet-stream'
- part.addheader('Content-Disposition',
- 'attachment;\n filename="%s"'%name)
- part.addheader('Content-Transfer-Encoding', 'base64')
- body = part.startbody(mime_type)
- body.write(base64.encodestring(content))
- writer.lastpart()
- else:
- writer.addheader('Content-Transfer-Encoding', 'quoted-printable')
- body = writer.startbody('text/plain')
- body.write(content_encoded)
-
- # now try to send the message
- if SENDMAILDEBUG:
- open(SENDMAILDEBUG, 'w').write('FROM: %s\nTO: %s\n%s\n'%(
- self.db.config.ADMIN_EMAIL,
- ', '.join(sendto),message.getvalue()))
- else:
- try:
- # send the message as admin so bounces are sent there
- # instead of to roundup
- smtp = smtplib.SMTP(self.db.config.MAILHOST)
- smtp.sendmail(self.db.config.ADMIN_EMAIL, sendto,
- message.getvalue())
- except socket.error, value:
- raise MessageSendError, \
- "Couldn't send confirmation email: mailhost %s"%value
- except smtplib.SMTPException, value:
- raise MessageSendError, \
- "Couldn't send confirmation email: %s"%value
-
- def email_signature(self, nodeid, msgid):
- ''' Add a signature to the e-mail with some useful information
- '''
- web = self.db.config.ISSUE_TRACKER_WEB + 'issue'+ nodeid
- email = '"%s" <%s>'%(self.db.config.INSTANCE_NAME,
- self.db.config.ISSUE_TRACKER_EMAIL)
- line = '_' * max(len(web), len(email))
- return '%s\n%s\n%s\n%s'%(line, email, web, line)
-
- def generateCreateNote(self, nodeid):
- """Generate a create note that lists initial property values
- """
- cn = self.classname
- cl = self.db.classes[cn]
- props = cl.getprops(protected=0)
-
- # list the values
- m = []
- l = props.items()
- l.sort()
- for propname, prop in l:
- value = cl.get(nodeid, propname, None)
- # skip boring entries
- if not value:
- continue
- if isinstance(prop, hyperdb.Link):
- link = self.db.classes[prop.classname]
- if value:
- key = link.labelprop(default_to_id=1)
- if key:
- value = link.get(value, key)
- else:
- value = ''
- elif isinstance(prop, hyperdb.Multilink):
- if value is None: value = []
- l = []
- link = self.db.classes[prop.classname]
- key = link.labelprop(default_to_id=1)
- if key:
- value = [link.get(entry, key) for entry in value]
- value.sort()
- value = ', '.join(value)
- m.append('%s: %s'%(propname, value))
- m.insert(0, '----------')
- m.insert(0, '')
- return '\n'.join(m)
-
    def generateChangeNote(self, nodeid, oldvalues):
        """Generate a change note that lists property changes

        Compares the node's current property values against *oldvalues*
        (a mapping of property name -> previous value) and returns a
        multi-line string describing each difference - one
        'propname: change' line per changed property - preceded by a
        blank line and a '----------' separator.  Returns the empty
        string when nothing changed.
        """
        cn = self.classname
        cl = self.db.classes[cn]
        changed = {}
        # only user-editable properties are considered
        props = cl.getprops(protected=0)

        # determine what changed
        for key in oldvalues.keys():
            # files/messages changes are not reported in the note
            if key in ['files','messages']: continue
            new_value = cl.get(nodeid, key)
            # the old value might be non existent
            try:
                old_value = oldvalues[key]
                if type(new_value) is type([]):
                    # compare multilinks order-insensitively
                    # (note: sorts both lists in place)
                    new_value.sort()
                    old_value.sort()
                if new_value != old_value:
                    changed[key] = old_value
            except:
                # NOTE(review): deliberately broad catch - any failure
                # above (e.g. unsortable or missing old value) records
                # the property as changed, storing the *new* value in
                # the "old value" slot.  Looks intentional as a
                # best-effort fallback - confirm before narrowing.
                changed[key] = new_value

        # list the changes
        m = []
        l = changed.items()
        l.sort()
        for propname, oldvalue in l:
            prop = props[propname]
            value = cl.get(nodeid, propname, None)
            if isinstance(prop, hyperdb.Link):
                # render both old and new ends using the linked class'
                # label property rather than raw ids
                link = self.db.classes[prop.classname]
                key = link.labelprop(default_to_id=1)
                if key:
                    if value:
                        value = link.get(value, key)
                    else:
                        value = ''
                    if oldvalue:
                        oldvalue = link.get(oldvalue, key)
                    else:
                        oldvalue = ''
                change = '%s -> %s'%(oldvalue, value)
            elif isinstance(prop, hyperdb.Multilink):
                # report multilinks as '+added, entries -removed, entries'
                change = ''
                if value is None: value = []
                if oldvalue is None: oldvalue = []
                l = []
                link = self.db.classes[prop.classname]
                key = link.labelprop(default_to_id=1)
                # check for additions
                for entry in value:
                    if entry in oldvalue: continue
                    if key:
                        l.append(link.get(entry, key))
                    else:
                        l.append(entry)
                if l:
                    change = '+%s'%(', '.join(l))
                    l = []
                # check for removals
                for entry in oldvalue:
                    if entry in value: continue
                    if key:
                        l.append(link.get(entry, key))
                    else:
                        l.append(entry)
                if l:
                    change += ' -%s'%(', '.join(l))
            else:
                # plain property: show 'old -> new'
                change = '%s -> %s'%(oldvalue, value)
            m.append('%s: %s'%(propname, change))
        if m:
            m.insert(0, '----------')
            m.insert(0, '')
        return '\n'.join(m)