index 5d67abc4ca6d56089099a2c52cfeb1065a7f980d..2894cc63e35a8374b17ed070bd0fd0f6955353a9 100644 (file)
-# $Id: rdbms_common.py,v 1.62 2003-09-08 20:39:18 jlgijsbers Exp $
+# $Id: rdbms_common.py,v 1.81 2004-03-18 01:58:45 richard Exp $
''' Relational database (SQL) backend common code.
Basics:
probably a bit of work to be done if a database is used that actually
honors column typing, since the initial databases don't (sqlite stores
everything as a string.)
+
+The schema of the hyperdb being mapped to the database is stored in the
+database itself as a repr()'ed dictionary of information about each Class
+that maps to a table. If that information differs from the hyperdb schema,
+then we update it. We also store in the schema dict a version which
+allows us to upgrade the database schema when necessary. See upgrade_db().
'''
+__docformat__ = 'restructuredtext'
# standard python modules
import sys, os, time, re, errno, weakref, copy
# support
from blobfiles import FileStorage
from roundup.indexer import Indexer
-from sessions import Sessions, OneTimeKeys
+from sessions_rdbms import Sessions, OneTimeKeys
from roundup.date import Range
# number of rows to keep in memory
self.dir = config.DATABASE
self.classes = {}
self.indexer = Indexer(self.dir)
- self.sessions = Sessions(self.config)
- self.otks = OneTimeKeys(self.config)
self.security = security.Security(self)
# additional transaction support for external files and the like
self.cache = {}
self.cache_lru = []
+ def getSessionManager(self):
+ return Sessions(self)
+
+ def getOTKManager(self):
+ return OneTimeKeys(self)
+
def open_connection(self):
- ''' Open a connection to the database, creating it if necessary
+ ''' Open a connection to the database, creating it if necessary.
+
+ Must call self.load_dbschema()
'''
raise NotImplemented
def sql_fetchone(self):
''' Fetch a single row. If there's nothing to fetch, return None.
'''
- raise NotImplemented
+ return self.cursor.fetchone()
+
+ def sql_fetchall(self):
+ ''' Fetch all rows. If there's nothing to fetch, return [].
+ '''
+ return self.cursor.fetchall()
def sql_stringquote(self, value):
''' Quote the string so it's safe to put in the 'sql quotes'
'''
return re.sub("'", "''", str(value))
- def save_dbschema(self, schema):
- ''' Save the schema definition that the database currently implements
- '''
- raise NotImplemented
+ def init_dbschema(self):
+ self.database_schema = {
+ 'version': self.current_db_version,
+ 'tables': {}
+ }
def load_dbschema(self):
''' Load the schema definition that the database currently implements
'''
- raise NotImplemented
+ self.cursor.execute('select schema from schema')
+ schema = self.cursor.fetchone()
+ if schema:
+ self.database_schema = eval(schema[0])
+ else:
+ self.database_schema = {}
+
+ def save_dbschema(self, schema):
+ ''' Save the schema definition that the database currently implements
+ '''
+ s = repr(self.database_schema)
+ self.sql('insert into schema values (%s)', (s,))
def post_init(self):
''' Called once the schema initialisation has finished.
We should now confirm that the schema defined by our "classes"
attribute actually matches the schema in the database.
'''
+ save = self.upgrade_db()
+
# now detect changes in the schema
- save = 0
+ tables = self.database_schema['tables']
for classname, spec in self.classes.items():
- if self.database_schema.has_key(classname):
- dbspec = self.database_schema[classname]
+ if tables.has_key(classname):
+ dbspec = tables[classname]
if self.update_class(spec, dbspec):
- self.database_schema[classname] = spec.schema()
+ tables[classname] = spec.schema()
save = 1
else:
self.create_class(spec)
- self.database_schema[classname] = spec.schema()
+ tables[classname] = spec.schema()
save = 1
- for classname in self.database_schema.keys():
+ for classname, spec in tables.items():
if not self.classes.has_key(classname):
- self.drop_class(classname)
+ self.drop_class(classname, tables[classname])
+ del tables[classname]
+ save = 1
# update the database version of the schema
if save:
# commit
self.conn.commit()
+ # update this number when we need to make changes to the SQL structure
+    # of the backend database
+ current_db_version = 2
+ def upgrade_db(self):
+ ''' Update the SQL database to reflect changes in the backend code.
+
+ Return boolean whether we need to save the schema.
+ '''
+ version = self.database_schema.get('version', 1)
+ if version == self.current_db_version:
+ # nothing to do
+ return 0
+
+ if version == 1:
+ # version 1 doesn't have the OTK, session and indexing in the
+ # database
+ self.create_version_2_tables()
+ # version 1 also didn't have the actor column
+ self.add_actor_column()
+
+ self.database_schema['version'] = self.current_db_version
+ return 1
+
+
+ def refresh_database(self):
+ self.post_init()
+
def reindex(self):
for klass in self.classes.values():
for nodeid in klass.list():
"properties" is a list of (name, prop) where prop may be an
instance of a hyperdb "type" _or_ a string repr of that type.
'''
- cols = ['_activity', '_creator', '_creation']
+ cols = ['_actor', '_activity', '_creator', '_creation']
mls = []
# add the multilinks separately
for col, prop in properties:
cols.sort()
return cols, mls
- def update_class(self, spec, old_spec):
+ def update_class(self, spec, old_spec, force=0):
''' Determine the differences between the current spec and the
- database version of the spec, and update where necessary
- '''
- new_spec = spec
- new_has = new_spec.properties.has_key
+ database version of the spec, and update where necessary.
- new_spec = new_spec.schema()
+ If 'force' is true, update the database anyway.
+ '''
+ new_has = spec.properties.has_key
+ new_spec = spec.schema()
new_spec[1].sort()
old_spec[1].sort()
- if new_spec == old_spec:
+ if not force and new_spec == old_spec:
# no changes
return 0
if __debug__:
print >>hyperdb.DEBUG, 'update_class FIRING'
- # key property changed?
- if old_spec[0] != new_spec[0]:
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class setting keyprop', `spec[0]`
- # XXX turn on indexing for the key property
+ # detect key prop change for potential index change
+        keyprop_changes = {}
+ if new_spec[0] != old_spec[0]:
+ keyprop_changes = {'remove': old_spec[0], 'add': new_spec[0]}
# detect multilinks that have been removed, and drop their table
old_has = {}
- for name,prop in old_spec[1]:
+ for name, prop in old_spec[1]:
old_has[name] = 1
- if not new_has(name) and isinstance(prop, Multilink):
- # it's a multilink, and it's been removed - drop the old
- # table
- sql = 'drop table %s_%s'%(spec.classname, prop)
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql)
- self.cursor.execute(sql)
+ if new_has(name):
continue
- old_has = old_has.has_key
- # now figure how we populate the new table
- fetch = ['_activity', '_creation', '_creator']
- properties = spec.getprops()
- for propname,x in new_spec[1]:
- prop = properties[propname]
if isinstance(prop, Multilink):
- if not old_has(propname):
- # we need to create the new table
- self.create_multilink_table(spec, propname)
- elif old_has(propname):
- # we copy this col over from the old table
- fetch.append('_'+propname)
-
- # select the data out of the old table
- fetch.append('id')
- fetch.append('__retired__')
- fetchcols = ','.join(fetch)
- cn = spec.classname
- sql = 'select %s from _%s'%(fetchcols, cn)
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql)
- self.cursor.execute(sql)
- olddata = self.cursor.fetchall()
+                # first drop indexes.
+                self.drop_multilink_table_indexes(spec.classname, name)
-        # drop the old table
-        self.cursor.execute('drop table _%s'%cn)
+                # now the multilink table itself
+                sql = 'drop table %s_%s'%(spec.classname, name)
+            else:
+                # if this is the key prop, drop the index first
+                if old_spec[0] == name:
+                    self.drop_class_table_key_index(spec.classname, name)
+                    del keyprop_changes['remove']
-        # create the new table
-        self.create_class_table(spec)
+                # drop the column
+                sql = 'alter table _%s drop column _%s'%(spec.classname, name)
- if olddata:
- # do the insert
- args = ','.join([self.arg for x in fetch])
- sql = 'insert into _%s (%s) values (%s)'%(cn, fetchcols, args)
if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql, olddata[0])
- for entry in olddata:
- self.cursor.execute(sql, tuple(entry))
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
+ old_has = old_has.has_key
+
+ # if we didn't remove the key prop just then, but the key prop has
+ # changed, we still need to remove the old index
+ if keyprop_changes.has_key('remove'):
+ self.drop_class_table_key_index(spec.classname,
+ keyprop_changes['remove'])
+
+ # add new columns
+ for propname, x in new_spec[1]:
+ if old_has(propname):
+ continue
+ sql = 'alter table _%s add column _%s varchar(255)'%(
+ spec.classname, propname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
+
+ # if the new column is a key prop, we need an index!
+ if new_spec[0] == propname:
+ self.create_class_table_key_index(spec.classname, propname)
+ del keyprop_changes['add']
+
+ # if we didn't add the key prop just then, but the key prop has
+ # changed, we still need to add the new index
+ if keyprop_changes.has_key('add'):
+ self.create_class_table_key_index(spec.classname,
+ keyprop_changes['add'])
return 1
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ self.create_class_table_indexes(spec)
+
return cols, mls
+ def create_class_table_indexes(self, spec):
+ ''' create the class table for the given spec
+ '''
+ # create id index
+ index_sql1 = 'create index _%s_id_idx on _%s(id)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql1)
+ self.cursor.execute(index_sql1)
+
+ # create __retired__ index
+ index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql2)
+ self.cursor.execute(index_sql2)
+
+ # create index for key property
+ if spec.key:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class setting keyprop %r'% \
+ spec.key
+ index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(
+ spec.classname, spec.key,
+ spec.classname, spec.key)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql3)
+ self.cursor.execute(index_sql3)
+
+ def drop_class_table_indexes(self, cn, key):
+ # drop the old table indexes first
+ l = ['_%s_id_idx'%cn, '_%s_retired_idx'%cn]
+ if key:
+ l.append('_%s_%s_idx'%(cn, key))
+
+ table_name = '_%s'%cn
+ for index_name in l:
+ if not self.sql_index_exists(table_name, index_name):
+ continue
+ index_sql = 'drop index '+index_name
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
+ def create_class_table_key_index(self, cn, key):
+ ''' create the class table for the given spec
+ '''
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class setting keyprop %r'% \
+ key
+ index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key,
+ cn, key)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql3)
+ self.cursor.execute(index_sql3)
+
+ def drop_class_table_key_index(self, cn, key):
+ table_name = '_%s'%cn
+ index_name = '_%s_%s_idx'%(cn, key)
+ if not self.sql_index_exists(table_name, index_name):
+ return
+ sql = 'drop index '+index_name
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, sql)
+ self.cursor.execute(sql)
+
def create_journal_table(self, spec):
''' create the journal table for a class given the spec and
already-determined cols
if __debug__:
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ self.create_journal_table_indexes(spec)
+
+ def create_journal_table_indexes(self, spec):
+ # index on nodeid
+ sql = 'create index %s_journ_idx on %s__journal(nodeid)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, sql)
+ self.cursor.execute(sql)
+
+ def drop_journal_table_indexes(self, classname):
+ index_name = '%s_journ_idx'%classname
+ if not self.sql_index_exists('%s__journal'%classname, index_name):
+ return
+ index_sql = 'drop index '+index_name
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ self.cursor.execute(index_sql)
def create_multilink_table(self, spec, ml):
''' Create a multilink table for the "ml" property of the class
given by the spec
'''
+ # create the table
sql = 'create table %s_%s (linkid varchar, nodeid varchar)'%(
spec.classname, ml)
if __debug__:
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ self.create_multilink_table_indexes(spec, ml)
+
+ def create_multilink_table_indexes(self, spec, ml):
+ # create index on linkid
+ index_sql = 'create index %s_%s_l_idx on %s_%s(linkid)'%(
+ spec.classname, ml, spec.classname, ml)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
+ # create index on nodeid
+ index_sql = 'create index %s_%s_n_idx on %s_%s(nodeid)'%(
+ spec.classname, ml, spec.classname, ml)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
+ def drop_multilink_table_indexes(self, classname, ml):
+ l = [
+ '%s_%s_l_idx'%(classname, ml),
+ '%s_%s_n_idx'%(classname, ml)
+ ]
+ table_name = '%s_%s'%(classname, ml)
+ for index_name in l:
+ if not self.sql_index_exists(table_name, index_name):
+ continue
+ index_sql = 'drop index %s'%index_name
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ self.cursor.execute(index_sql)
def create_class(self, spec):
''' Create a database table according to the given spec.
print >>hyperdb.DEBUG, 'create_class', (self, sql, vals)
self.cursor.execute(sql, vals)
- def drop_class(self, spec):
+ def drop_class(self, cn, spec):
''' Drop the given table from the database.
Drop the journal and multilink tables too.
'''
+ properties = spec[1]
# figure the multilinks
mls = []
- for col, prop in spec.properties.items():
+        for propname, prop in properties:
if isinstance(prop, Multilink):
- mls.append(col)
+ mls.append(propname)
- sql = 'drop table _%s'%spec.classname
+ # drop class table and indexes
+ self.drop_class_table_indexes(cn, spec[0])
+ sql = 'drop table _%s'%cn
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
self.cursor.execute(sql)
- sql = 'drop table %s__journal'%spec.classname
+ # drop journal table and indexes
+ self.drop_journal_table_indexes(cn)
+ sql = 'drop table %s__journal'%cn
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
self.cursor.execute(sql)
for ml in mls:
+ # drop multilink table and indexes
+ self.drop_multilink_table_indexes(cn, ml)
-            sql = 'drop table %s_%s'%(spec.classname, ml)
+            sql = 'drop table %s_%s'%(cn, ml)
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
raise ValueError, cn
self.classes[cn] = cl
+ # add default Edit and View permissions
+ self.security.addPermission(name="Edit", klass=cn,
+ description="User is allowed to edit "+cn)
+ self.security.addPermission(name="View", klass=cn,
+ description="User is allowed to access "+cn)
+
def getclasses(self):
''' Return a list of the names of all existing classes.
'''
raise KeyError, 'There is no class called "%s"'%classname
def clear(self):
- ''' Delete all database contents.
+ '''Delete all database contents.
- Note: I don't commit here, which is different behaviour to the
- "nuke from orbit" behaviour in the *dbms.
+ Note: I don't commit here, which is different behaviour to the
+ "nuke from orbit" behaviour in the dbs.
'''
if __debug__:
print >>hyperdb.DEBUG, 'clear', (self,)
# calling code's node assumptions)
node = node.copy()
node['creation'] = node['activity'] = date.Date()
- node['creator'] = self.getuid()
+ node['actor'] = node['creator'] = self.getuid()
# default the non-multilink columns
for col, prop in cl.properties.items():
# add the special props
values = values.copy()
values['activity'] = date.Date()
+ values['actor'] = self.getuid()
# make db-friendly
values = self.serialise(classname, values)
for col in mls:
# get the link ids
sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg)
- self.cursor.execute(sql, (nodeid,))
+ self.sql(sql, (nodeid,))
# remove journal entries
sql = 'delete from %s__journal where nodeid=%s'%(classname, self.arg)
p = password.Password()
p.unpack(v)
d[k] = p
- elif (isinstance(prop, Boolean) or isinstance(prop, Number)) and v is not None:
- d[k]=float(v)
+ elif isinstance(prop, Boolean) and v is not None:
+ d[k] = int(v)
+ elif isinstance(prop, Number) and v is not None:
+ # try int first, then assume it's a float
+ try:
+ d[k] = int(v)
+ except ValueError:
+ d[k] = float(v)
else:
d[k] = v
return d
self.save_journal(classname, cols, nodeid, journaldate,
journaltag, action, params)
- def save_journal(self, classname, cols, nodeid, journaldate,
- journaltag, action, params):
- ''' Save the journal entry to the database
- '''
- raise NotImplemented
-
def getjournal(self, classname, nodeid):
''' get the journal for id
'''
cols = ','.join('nodeid date tag action params'.split())
return self.load_journal(classname, cols, nodeid)
+ def save_journal(self, classname, cols, nodeid, journaldate,
+ journaltag, action, params):
+ ''' Save the journal entry to the database
+ '''
+ # make the params db-friendly
+ params = repr(params)
+ entry = (nodeid, journaldate, journaltag, action, params)
+
+ # do the insert
+ a = self.arg
+ sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(classname,
+ cols, a, a, a, a, a)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry)
+ self.cursor.execute(sql, entry)
+
def load_journal(self, classname, cols, nodeid):
''' Load the journal from the database
'''
- raise NotImplemented
+ # now get the journal entries
+ sql = 'select %s from %s__journal where nodeid=%s'%(cols, classname,
+ self.arg)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'load_journal', (self, sql, nodeid)
+ self.cursor.execute(sql, (nodeid,))
+ res = []
+ for nodeid, date_stamp, user, action, params in self.cursor.fetchall():
+ params = eval(params)
+ res.append((nodeid, date.Date(date_stamp), user, action, params))
+ return res
def pack(self, pack_before):
''' Delete all journal entries except "create" before 'pack_before'.
def sql_commit(self):
''' Actually commit to the database.
'''
+ if __debug__:
+ print >>hyperdb.DEBUG, '+++ commit database connection +++'
self.conn.commit()
def commit(self):
# clear out the transactions
self.transactions = []
+ def sql_rollback(self):
+ self.conn.rollback()
+
def rollback(self):
''' Reverse all actions from the current transaction.
if __debug__:
print >>hyperdb.DEBUG, 'rollback', (self,)
- # roll back
- self.conn.rollback()
+ self.sql_rollback()
# roll back "other" transaction stuff
for method, args in self.transactions:
# return the classname, nodeid so we reindex this content
return (classname, nodeid)
+ def sql_close(self):
+ if __debug__:
+ print >>hyperdb.DEBUG, '+++ close database connection +++'
+ self.conn.close()
+
def close(self):
''' Close off the connection.
'''
- self.conn.close()
- if self.lockfile is not None:
- locking.release_lock(self.lockfile)
- if self.lockfile is not None:
- self.lockfile.close()
- self.lockfile = None
+ self.sql_close()
#
# The base Class class
or a ValueError is raised. The keyword arguments in 'properties'
must map names to property objects, or a TypeError is raised.
'''
- if (properties.has_key('creation') or properties.has_key('activity')
- or properties.has_key('creator')):
- raise ValueError, '"creation", "activity" and "creator" are '\
- 'reserved'
+ for name in 'creation activity creator actor'.split():
+ if properties.has_key(name):
+ raise ValueError, '"creation", "activity", "creator" and '\
+ '"actor" are reserved'
self.classname = classname
self.properties = properties
if self.db.journaltag is None:
raise DatabaseError, 'Database open read-only'
- if propvalues.has_key('creation') or propvalues.has_key('activity'):
- raise KeyError, '"creation" and "activity" are reserved'
+ if propvalues.has_key('creator') or propvalues.has_key('actor') or \
+ propvalues.has_key('creation') or propvalues.has_key('activity'):
+ raise KeyError, '"creator", "actor", "creation" and '\
+ '"activity" are reserved'
# new node's id
newid = self.db.newid(self.classname)
elif isinstance(proptype, hyperdb.Password):
value = str(value)
l.append(repr(value))
- l.append(self.is_retired(nodeid))
+ l.append(repr(self.is_retired(nodeid)))
return l
def import_list(self, propnames, proplist):
if newid is None:
newid = self.db.newid(self.classname)
+ # add the node and journal
+ self.db.addnode(self.classname, newid, d)
+
# retire?
if retire:
# use the arg for __retired__ to cope with any odd database type
print >>hyperdb.DEBUG, 'retire', (self, sql, newid)
self.db.cursor.execute(sql, (1, newid))
- # add the node and journal
- self.db.addnode(self.classname, newid, d)
-
# extract the extraneous journalling gumpf and nuke it
if d.has_key('creator'):
creator = d['creator']
creation = None
if d.has_key('activity'):
del d['activity']
+ if d.has_key('actor'):
+ del d['actor']
self.db.addjournal(self.classname, newid, 'create', {}, creator,
creation)
return newid
return d['creator']
else:
return self.db.getuid()
+ if propname == 'actor':
+ if d.has_key('actor'):
+ return d['actor']
+ else:
+ return self.db.getuid()
# get the property (raises KeyErorr if invalid)
prop = self.properties[propname]
return d[propname]
- def getnode(self, nodeid, cache=1):
- ''' Return a convenience wrapper for the node.
-
- 'nodeid' must be the id of an existing node of this class or an
- IndexError is raised.
-
- 'cache' exists for backwards compatibility, and is not used.
- '''
- return Node(self, nodeid)
-
def set(self, nodeid, **propvalues):
'''Modify a property on an existing node of this class.
if not propvalues:
return propvalues
- if propvalues.has_key('creation') or propvalues.has_key('activity'):
- raise KeyError, '"creation" and "activity" are reserved'
+ if propvalues.has_key('creation') or propvalues.has_key('creator') or \
+ propvalues.has_key('actor') or propvalues.has_key('activity'):
+ raise KeyError, '"creation", "creator", "actor" and '\
+ '"activity" are reserved'
if propvalues.has_key('id'):
raise KeyError, '"id" is reserved'
WARNING: this method should never be used except in extremely rare
situations where there could never be links to the node being
deleted
+
WARNING: use retire() instead
+
WARNING: the properties of this node will not be available ever again
+
WARNING: really, use retire() instead
Well, I think that's enough warnings. This method exists mostly to
None, or a TypeError is raised. The values of the key property on
all existing nodes must be unique or a ValueError is raised.
'''
- # XXX create an index on the key prop column
+ # XXX create an index on the key prop column. We should also
+ # record that we've created this index in the schema somewhere.
prop = self.getprops()[propname]
if not isinstance(prop, String):
raise TypeError, 'key properties must be String'
return self.key
def labelprop(self, default_to_id=0):
- ''' Return the property name for a label for the given node.
+ '''Return the property name for a label for the given node.
This method attempts to generate a consistent label for the node.
It tries the following in order:
- 1. key property
- 2. "name" property
- 3. "title" property
- 4. first property from the sorted property name list
+
+ 1. key property
+ 2. "name" property
+ 3. "title" property
+ 4. first property from the sorted property name list
'''
k = self.getkey()
if k:
'propspec' consists of keyword args propname=nodeid or
propname={nodeid:1, }
'propname' must be the name of a property in this class, or a
- KeyError is raised. That property must be a Link or Multilink
- property, or a TypeError is raised.
+ KeyError is raised. That property must be a Link or
+ Multilink property, or a TypeError is raised.
Any node in this class whose 'propname' property links to any of the
nodeids will be returned. Used by the full text indexing, which knows
raise TypeError, "'%s' not a Link/Multilink property"%propname
# first, links
- where = []
- allvalues = ()
a = self.db.arg
+ allvalues = (1,)
+ o = []
+ where = []
for prop, values in propspec:
if not isinstance(props[prop], hyperdb.Link):
continue
+ if type(values) is type({}) and len(values) == 1:
+ values = values.keys()[0]
if type(values) is type(''):
allvalues += (values,)
where.append('_%s = %s'%(prop, a))
else:
allvalues += tuple(values.keys())
where.append('_%s in (%s)'%(prop, ','.join([a]*len(values))))
- tables = []
+ tables = ['_%s'%self.classname]
if where:
- tables.append('select id as nodeid from _%s where %s'%(
- self.classname, ' and '.join(where)))
+ o.append('(' + ' and '.join(where) + ')')
# now multilinks
for prop, values in propspec:
if not isinstance(props[prop], hyperdb.Multilink):
continue
+ if not values:
+ continue
if type(values) is type(''):
allvalues += (values,)
s = a
else:
allvalues += tuple(values.keys())
s = ','.join([a]*len(values))
- tables.append('select nodeid from %s_%s where linkid in (%s)'%(
- self.classname, prop, s))
- sql = '\nunion\n'.join(tables)
+ tn = '%s_%s'%(self.classname, prop)
+ tables.append(tn)
+ o.append('(id=%s.nodeid and %s.linkid in (%s))'%(tn, tn, s))
+
+ if not o:
+ return []
+ elif len(o) > 1:
+ o = '(' + ' or '.join(['(%s)'%i for i in o]) + ')'
+ else:
+ o = o[0]
+ t = ', '.join(tables)
+ sql = 'select distinct(id) from %s where __retired__ <> %s and %s'%(t, a, o)
self.db.sql(sql, allvalues)
l = [x[0] for x in self.db.sql_fetchall()]
if __debug__:
args = []
for propname in requirements.keys():
prop = self.properties[propname]
- if isinstance(not prop, String):
+ if not isinstance(prop, String):
raise TypeError, "'%s' not a String property"%propname
where.append(propname)
args.append(requirements[propname].lower())
# generate the where clause
s = ' and '.join(['lower(_%s)=%s'%(col, self.db.arg) for col in where])
- sql = 'select id from _%s where %s'%(self.classname, s)
+ sql = 'select id from _%s where %s and __retired__=%s'%(self.classname,
+ s, self.db.arg)
+ args.append(0)
self.db.sql(sql, tuple(args))
l = [x[0] for x in self.db.sql_fetchall()]
if __debug__:
def filter(self, search_matches, filterspec, sort=(None,None),
group=(None,None)):
- ''' Return a list of the ids of the active nodes in this class that
- match the 'filter' spec, sorted by the group spec and then the
- sort spec
+ '''Return a list of the ids of the active nodes in this class that
+ match the 'filter' spec, sorted by the group spec and then the
+ sort spec
+
+ "filterspec" is {propname: value(s)}
- "filterspec" is {propname: value(s)}
- "sort" and "group" are (dir, prop) where dir is '+', '-' or None
- and prop is a prop name or None
- "search_matches" is {nodeid: marker}
+ "sort" and "group" are (dir, prop) where dir is '+', '-' or None
+ and prop is a prop name or None
- The filter must match all properties specificed - but if the
- property value to match is a list, any one of the values in the
- list may match for that property to match.
+ "search_matches" is {nodeid: marker}
+
+ The filter must match all properties specificed - but if the
+ property value to match is a list, any one of the values in the
+ list may match for that property to match.
'''
# just don't bother if the full-text search matched diddly
if search_matches == {}:
args = tuple(args)
if __debug__:
print >>hyperdb.DEBUG, 'filter', (self, sql, args)
- self.db.cursor.execute(sql, args)
- l = self.db.cursor.fetchall()
+ if args:
+ self.db.cursor.execute(sql, args)
+ else:
+ # psycopg doesn't like empty args
+ self.db.cursor.execute(sql)
+ l = self.db.sql_fetchall()
# return the IDs (the first column)
- # XXX The filter(None, l) bit is sqlite-specific... if there's _NO_
- # XXX matches to a fetch, it returns NULL instead of nothing!?!
- return filter(None, [row[0] for row in l])
+ return [row[0] for row in l]
def count(self):
'''Get the number of nodes in this class.
d['creation'] = hyperdb.Date()
d['activity'] = hyperdb.Date()
d['creator'] = hyperdb.Link('user')
+ d['actor'] = hyperdb.Link('user')
return d
def addprop(self, **properties):
'''The newly-created class automatically includes the "messages",
"files", "nosy", and "superseder" properties. If the 'properties'
dictionary attempts to specify any of these properties or a
- "creation" or "activity" property, a ValueError is raised.
+ "creation", "creator", "activity" or "actor" property, a ValueError
+ is raised.
'''
if not properties.has_key('title'):
properties['title'] = hyperdb.String(indexme='yes')