index a60c79d6646f1296bdcaddff6d7292e5826e3847..4c5760cdc56d4f3cb4594cb3c8a6ea0395460fc4 100644 (file)
-# $Id: rdbms_common.py,v 1.22 2002-10-08 04:11:16 richard Exp $
+# $Id: rdbms_common.py,v 1.66 2003-10-25 22:53:26 richard Exp $
''' Relational database (SQL) backend common code.
Basics:
sql_* methods, since everything else should be fairly generic. There's
probably a bit of work to be done if a database is used that actually
honors column typing, since the initial databases don't (sqlite stores
-everything as a string, and gadfly stores anything that's marsallable).
+everything as a string).
'''
# standard python modules
# roundup modules
from roundup import hyperdb, date, password, roundupdb, security
from roundup.hyperdb import String, Password, Date, Interval, Link, \
- Multilink, DatabaseError, Boolean, Number
+ Multilink, DatabaseError, Boolean, Number, Node
+from roundup.backends import locking
# support
from blobfiles import FileStorage
from roundup.indexer import Indexer
-from sessions import Sessions
+from sessions import Sessions, OneTimeKeys
+from roundup.date import Range
# number of rows to keep in memory
ROW_CACHE_SIZE = 100
self.classes = {}
self.indexer = Indexer(self.dir)
self.sessions = Sessions(self.config)
+ self.otks = OneTimeKeys(self.config)
self.security = security.Security(self)
# additional transaction support for external files and the like
self.cache = {}
self.cache_lru = []
+ # database lock
+ self.lockfile = None
+
# open a connection to the database, creating the "conn" attribute
self.open_connection()
self.database_schema[classname] = spec.schema()
save = 1
- for classname in self.database_schema.keys():
+ for classname, spec in self.database_schema.items():
if not self.classes.has_key(classname):
- self.drop_class(classname)
+ self.drop_class(classname, spec)
+ del self.database_schema[classname]
+ save = 1
# update the database version of the schema
if save:
# commit
self.conn.commit()
- # figure the "curuserid"
- if self.journaltag is None:
- self.curuserid = None
- elif self.journaltag == 'admin':
- # admin user may not exist, but always has ID 1
- self.curuserid = '1'
- else:
- self.curuserid = self.user.lookup(self.journaltag)
+ def refresh_database(self):
+ # now detect changes in the schema
+ for classname, spec in self.classes.items():
+ dbspec = self.database_schema[classname]
+ self.update_class(spec, dbspec, force=1)
+ self.database_schema[classname] = spec.schema()
+ # update the database version of the schema
+ self.sql('delete from schema')
+ self.save_dbschema(self.database_schema)
+ # reindex the db
+ self.reindex()
+ # commit
+ self.conn.commit()
+
def reindex(self):
for klass in self.classes.values():
cols.sort()
return cols, mls
- def update_class(self, spec, dbspec):
+ def update_class(self, spec, old_spec, force=0):
''' Determine the differences between the current spec and the
- database version of the spec, and update where necessary
- '''
- spec_schema = spec.schema()
- if spec_schema == dbspec:
- # no save needed for this one
+ database version of the spec, and update where necessary.
+ If 'force' is true, update the database anyway.
+ '''
+ new_has = spec.properties.has_key
+ new_spec = spec.schema()
+ new_spec[1].sort()
+ old_spec[1].sort()
+ if not force and new_spec == old_spec:
+ # no changes
return 0
+
if __debug__:
print >>hyperdb.DEBUG, 'update_class FIRING'
- # key property changed?
- if dbspec[0] != spec_schema[0]:
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class setting keyprop', `spec[0]`
- # XXX turn on indexing for the key property
-
- # dict 'em up
- spec_propnames,spec_props = [],{}
- for propname,prop in spec_schema[1]:
- spec_propnames.append(propname)
- spec_props[propname] = prop
- dbspec_propnames,dbspec_props = [],{}
- for propname,prop in dbspec[1]:
- dbspec_propnames.append(propname)
- dbspec_props[propname] = prop
-
- # now compare
- for propname in spec_propnames:
- prop = spec_props[propname]
- if dbspec_props.has_key(propname) and prop==dbspec_props[propname]:
+ # detect multilinks that have been removed, and drop their table
+ old_has = {}
+ for name,prop in old_spec[1]:
+ old_has[name] = 1
+ if (force or not new_has(name)) and isinstance(prop, Multilink):
+ # it's a multilink, and it's been removed - drop the old
+ # table. First drop indexes.
+                index_sqls = [ 'drop index %s_%s_l_idx'%(spec.classname, name),
+                    'drop index %s_%s_n_idx'%(spec.classname, name) ]
+ for index_sql in index_sqls:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ try:
+ self.cursor.execute(index_sql)
+ except:
+ # The database may not actually have any indexes.
+ # assume the worst.
+ pass
+                sql = 'drop table %s_%s'%(spec.classname, name)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
continue
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class ADD', (propname, prop)
+ old_has = old_has.has_key
- if not dbspec_props.has_key(propname):
- # add the property
- if isinstance(prop, Multilink):
- # all we have to do here is create a new table, easy!
+ # now figure how we populate the new table
+ fetch = ['_activity', '_creation', '_creator']
+ properties = spec.getprops()
+ for propname,x in new_spec[1]:
+ prop = properties[propname]
+ if isinstance(prop, Multilink):
+ if force or not old_has(propname):
+ # we need to create the new table
self.create_multilink_table(spec, propname)
- continue
-
- # no ALTER TABLE, so we:
- # 1. pull out the data, including an extra None column
- oldcols, x = self.determine_columns(dbspec[1])
- oldcols.append('id')
- oldcols.append('__retired__')
- cn = spec.classname
- sql = 'select %s,%s from _%s'%(','.join(oldcols), self.arg, cn)
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql, None)
- self.cursor.execute(sql, (None,))
- olddata = self.cursor.fetchall()
-
- # 2. drop the old table
- self.cursor.execute('drop table _%s'%cn)
-
- # 3. create the new table
- cols, mls = self.create_class_table(spec)
- # ensure the new column is last
- cols.remove('_'+propname)
- assert oldcols == cols, "Column lists don't match!"
- cols.append('_'+propname)
-
- # 4. populate with the data from step one
- s = ','.join([self.arg for x in cols])
- scols = ','.join(cols)
- sql = 'insert into _%s (%s) values (%s)'%(cn, scols, s)
-
- # GAH, nothing had better go wrong from here on in... but
- # we have to commit the drop...
- # XXX this isn't necessary in sqlite :(
- self.conn.commit()
-
- # do the insert
- for row in olddata:
- self.sql(sql, tuple(row))
+ elif old_has(propname):
+ # we copy this col over from the old table
+ fetch.append('_'+propname)
+
+ # select the data out of the old table
+ fetch.append('id')
+ fetch.append('__retired__')
+ fetchcols = ','.join(fetch)
+ cn = spec.classname
+ sql = 'select %s from _%s'%(fetchcols, cn)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
+ olddata = self.cursor.fetchall()
+
+ # drop the old table indexes first
+ index_sqls = [ 'drop index _%s_id_idx'%cn,
+ 'drop index _%s_retired_idx'%cn ]
+ if old_spec[0]:
+ index_sqls.append('drop index _%s_%s_idx'%(cn, old_spec[0]))
+ for index_sql in index_sqls:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ try:
+ self.cursor.execute(index_sql)
+ except:
+ # The database may not actually have any indexes.
+ # assume the worst.
+ pass
- else:
- # modify the property
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class NOOP'
- pass # NOOP in gadfly
+ # drop the old table
+ self.cursor.execute('drop table _%s'%cn)
- # and the other way - only worry about deletions here
- for propname in dbspec_propnames:
- prop = dbspec_props[propname]
- if spec_props.has_key(propname):
- continue
+ # create the new table
+ self.create_class_table(spec)
+
+ if olddata:
+ # do the insert
+ args = ','.join([self.arg for x in fetch])
+ sql = 'insert into _%s (%s) values (%s)'%(cn, fetchcols, args)
if __debug__:
- print >>hyperdb.DEBUG, 'update_class REMOVE', `prop`
+ print >>hyperdb.DEBUG, 'update_class', (self, sql, olddata[0])
+ for entry in olddata:
+ self.cursor.execute(sql, tuple(entry))
- # delete the property
- if isinstance(prop, Multilink):
- sql = 'drop table %s_%s'%(spec.classname, prop)
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql)
- self.cursor.execute(sql)
- else:
- # no ALTER TABLE, so we:
- # 1. pull out the data, excluding the removed column
- oldcols, x = self.determine_columns(spec.properties.items())
- oldcols.append('id')
- oldcols.append('__retired__')
- # remove the missing column
- oldcols.remove('_'+propname)
- cn = spec.classname
- sql = 'select %s from _%s'%(','.join(oldcols), cn)
- self.cursor.execute(sql, (None,))
- olddata = sql.fetchall()
-
- # 2. drop the old table
- self.cursor.execute('drop table _%s'%cn)
-
- # 3. create the new table
- cols, mls = self.create_class_table(self, spec)
- assert oldcols != cols, "Column lists don't match!"
-
- # 4. populate with the data from step one
- qs = ','.join([self.arg for x in cols])
- sql = 'insert into _%s values (%s)'%(cn, s)
- self.cursor.execute(sql, olddata)
return 1
def create_class_table(self, spec):
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ # create id index
+ index_sql1 = 'create index _%s_id_idx on _%s(id)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql1)
+ self.cursor.execute(index_sql1)
+
+ # create __retired__ index
+ index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql2)
+ self.cursor.execute(index_sql2)
+
+ # create index for key property
+ if spec.key:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class setting keyprop %r'% \
+ spec.key
+ index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(
+ spec.classname, spec.key,
+ spec.classname, spec.key)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql3)
+ self.cursor.execute(index_sql3)
+
return cols, mls
def create_journal_table(self, spec):
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ # index on nodeid
+ index_sql = 'create index %s_journ_idx on %s__journal(nodeid)'%(
+ spec.classname, spec.classname)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
def create_multilink_table(self, spec, ml):
''' Create a multilink table for the "ml" property of the class
given by the spec
'''
+ # create the table
sql = 'create table %s_%s (linkid varchar, nodeid varchar)'%(
spec.classname, ml)
if __debug__:
print >>hyperdb.DEBUG, 'create_class', (self, sql)
self.cursor.execute(sql)
+ # create index on linkid
+ index_sql = 'create index %s_%s_l_idx on %s_%s(linkid)'%(
+ spec.classname, ml, spec.classname, ml)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
+ # create index on nodeid
+ index_sql = 'create index %s_%s_n_idx on %s_%s(nodeid)'%(
+ spec.classname, ml, spec.classname, ml)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
+ self.cursor.execute(index_sql)
+
def create_class(self, spec):
''' Create a database table according to the given spec.
'''
print >>hyperdb.DEBUG, 'create_class', (self, sql, vals)
self.cursor.execute(sql, vals)
- def drop_class(self, spec):
+ def drop_class(self, cn, spec):
''' Drop the given table from the database.
Drop the journal and multilink tables too.
'''
+ properties = spec[1]
# figure the multilinks
mls = []
- for col, prop in spec.properties.items():
+        for propname, prop in properties:
if isinstance(prop, Multilink):
- mls.append(col)
+ mls.append(propname)
+
+ index_sqls = [ 'drop index _%s_id_idx'%cn,
+ 'drop index _%s_retired_idx'%cn,
+ 'drop index %s_journ_idx'%cn ]
+ if spec[0]:
+ index_sqls.append('drop index _%s_%s_idx'%(cn, spec[0]))
+ for index_sql in index_sqls:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ try:
+ self.cursor.execute(index_sql)
+ except:
+ # The database may not actually have any indexes.
+ # assume the worst.
+ pass
- sql = 'drop table _%s'%spec.classname
+ sql = 'drop table _%s'%cn
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
self.cursor.execute(sql)
- sql = 'drop table %s__journal'%spec.classname
+ sql = 'drop table %s__journal'%cn
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
self.cursor.execute(sql)
for ml in mls:
+ index_sqls = [
+ 'drop index %s_%s_n_idx'%(cn, ml),
+ 'drop index %s_%s_l_idx'%(cn, ml),
+ ]
+ for index_sql in index_sqls:
+ if __debug__:
+ print >>hyperdb.DEBUG, 'drop_index', (self, index_sql)
+ try:
+ self.cursor.execute(index_sql)
+ except:
+ # The database may not actually have any indexes.
+ # assume the worst.
+ pass
-        sql = 'drop table %s_%s'%(spec.classname, ml)
+        sql = 'drop table %s_%s'%(cn, ml)
if __debug__:
print >>hyperdb.DEBUG, 'drop_class', (self, sql)
#
# Nodes
#
-
def addnode(self, classname, nodeid, node):
''' Add the specified node to its class's db.
'''
if __debug__:
print >>hyperdb.DEBUG, 'addnode', (self, classname, nodeid, node)
- # gadfly requires values for all non-multilink columns
+
+ # determine the column definitions and multilink tables
cl = self.classes[classname]
cols, mls = self.determine_columns(cl.properties.items())
# calling code's node assumptions)
node = node.copy()
node['creation'] = node['activity'] = date.Date()
- node['creator'] = self.curuserid
+ node['creator'] = self.getuid()
# default the non-multilink columns
for col, prop in cl.properties.items():
- if not isinstance(col, Multilink):
- if not node.has_key(col):
+ if not node.has_key(col):
+ if isinstance(prop, Multilink):
+ node[col] = []
+ else:
node[col] = None
# clear this node out of the cache if it's in there
p = password.Password()
p.unpack(v)
d[k] = p
+ elif (isinstance(prop, Boolean) or isinstance(prop, Number)) and v is not None:
+ d[k]=float(v)
else:
d[k] = v
return d
self.cursor.execute(sql)
return self.cursor.fetchone()[0]
- def getnodeids(self, classname, retired=0):
- ''' Retrieve all the ids of the nodes for a particular Class.
-
- Set retired=None to get all nodes. Otherwise it'll get all the
- retired or non-retired nodes, depending on the flag.
- '''
- # flip the sense of the flag if we don't want all of them
- if retired is not None:
- retired = not retired
- sql = 'select id from _%s where __retired__ <> %s'%(classname, self.arg)
- if __debug__:
- print >>hyperdb.DEBUG, 'getnodeids', (self, sql, retired)
- self.cursor.execute(sql, (retired,))
- return [x[0] for x in self.cursor.fetchall()]
-
def addjournal(self, classname, nodeid, action, params, creator=None,
creation=None):
''' Journal the Action
if creator:
journaltag = creator
else:
- journaltag = self.curuserid
+ journaltag = self.getuid()
if creation:
journaldate = creation.serialise()
else:
self.rollbackStoreFile(*args)
self.transactions = []
+ # clear the cache
+ self.clearCache()
+
def doSaveNode(self, classname, nodeid, node):
''' dummy that just generates a reindex event
'''
''' Close off the connection.
'''
self.conn.close()
+ if self.lockfile is not None:
+ locking.release_lock(self.lockfile)
+ if self.lockfile is not None:
+ self.lockfile.close()
+ self.lockfile = None
#
# The base Class class
# do the db-related init stuff
db.addclass(self)
- self.auditors = {'create': [], 'set': [], 'retire': []}
- self.reactors = {'create': [], 'set': [], 'retire': []}
+ self.auditors = {'create': [], 'set': [], 'retire': [], 'restore': []}
+ self.reactors = {'create': [], 'set': [], 'retire': [], 'restore': []}
def schema(self):
''' A dumpable version of the schema that we can store in the
If an id in a link or multilink property does not refer to a valid
node, an IndexError is raised.
'''
+ self.fireAuditors('create', None, propvalues)
+ newid = self.create_inner(**propvalues)
+ self.fireReactors('create', newid, None)
+ return newid
+
+ def create_inner(self, **propvalues):
+ ''' Called by create, in-between the audit and react calls.
+ '''
if propvalues.has_key('id'):
raise KeyError, '"id" is reserved'
if propvalues.has_key('creation') or propvalues.has_key('activity'):
raise KeyError, '"creation" and "activity" are reserved'
- self.fireAuditors('create', None, propvalues)
-
# new node's id
newid = self.db.newid(self.classname)
(self.classname, newid, key))
elif isinstance(prop, String):
- if type(value) != type(''):
+ if type(value) != type('') and type(value) != type(u''):
raise TypeError, 'new property "%s" not a string'%key
elif isinstance(prop, Password):
# done
self.db.addnode(self.classname, newid, propvalues)
if self.do_journal:
- self.db.addjournal(self.classname, newid, 'create', propvalues)
-
- self.fireReactors('create', newid, None)
+ self.db.addjournal(self.classname, newid, 'create', {})
return newid
elif isinstance(proptype, hyperdb.Password):
value = str(value)
l.append(repr(value))
+ l.append(self.is_retired(nodeid))
return l
def import_list(self, propnames, proplist):
# make the new node's property map
d = {}
+ retire = 0
+ newid = None
for i in range(len(propnames)):
# Use eval to reverse the repr() used to output the CSV
value = eval(proplist[i])
# Figure the property for this column
propname = propnames[i]
- prop = properties[propname]
# "unmarshal" where necessary
if propname == 'id':
newid = value
continue
+ elif propname == 'is retired':
+ # is the item retired?
+ if int(value):
+ retire = 1
+ continue
elif value is None:
+ d[propname] = None
+ continue
+
+ prop = properties[propname]
+ if value is None:
# don't set Nones
continue
elif isinstance(prop, hyperdb.Date):
value = pwd
d[propname] = value
+ # get a new id if necessary
+ if newid is None:
+ newid = self.db.newid(self.classname)
+
+ # retire?
+ if retire:
+ # use the arg for __retired__ to cope with any odd database type
+ # conversion (hello, sqlite)
+ sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
+ self.db.arg, self.db.arg)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'retire', (self, sql, newid)
+ self.db.cursor.execute(sql, (1, newid))
+
# add the node and journal
self.db.addnode(self.classname, newid, d)
creation = None
if d.has_key('activity'):
del d['activity']
- self.db.addjournal(self.classname, newid, 'create', d, creator,
+ self.db.addjournal(self.classname, newid, 'create', {}, creator,
creation)
return newid
IndexError is raised. 'propname' must be the name of a property
of this class or a KeyError is raised.
- 'cache' indicates whether the transaction cache should be queried
- for the node. If the node has been modified and you need to
- determine what its values prior to modification are, you need to
- set cache=0.
+ 'cache' exists for backwards compatibility, and is not used.
'''
if propname == 'id':
return nodeid
if d.has_key('creator'):
return d['creator']
else:
- return self.db.curuserid
+ return self.db.getuid()
# get the property (raises KeyErorr if invalid)
prop = self.properties[propname]
'nodeid' must be the id of an existing node of this class or an
IndexError is raised.
- 'cache' indicates whether the transaction cache should be queried
- for the node. If the node has been modified and you need to
- determine what its values prior to modification are, you need to
- set cache=0.
+ 'cache' exists for backwards compatibility, and is not used.
'''
- return Node(self, nodeid, cache=cache)
+ return Node(self, nodeid)
def set(self, nodeid, **propvalues):
'''Modify a property on an existing node of this class.
# if the value's the same as the existing value, no sense in
# doing anything
- if node.has_key(propname) and value == node[propname]:
+ current = node.get(propname, None)
+ if value == current:
del propvalues[propname]
continue
+ journalvalues[propname] = current
# do stuff based on the prop type
if isinstance(prop, Link):
journalvalues[propname] = tuple(l)
elif isinstance(prop, String):
- if value is not None and type(value) != type(''):
+ if value is not None and type(value) != type('') and type(value) != type(u''):
raise TypeError, 'new property "%s" not a string'%propname
elif isinstance(prop, Password):
self.db.setnode(self.classname, nodeid, propvalues, multilink_changes)
if self.do_journal:
- propvalues.update(journalvalues)
- self.db.addjournal(self.classname, nodeid, 'set', propvalues)
+ self.db.addjournal(self.classname, nodeid, 'set', journalvalues)
self.fireReactors('set', nodeid, oldvalues)
if self.db.journaltag is None:
raise DatabaseError, 'Database open read-only'
+ self.fireAuditors('retire', nodeid, None)
+
# use the arg for __retired__ to cope with any odd database type
# conversion (hello, sqlite)
sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
if __debug__:
print >>hyperdb.DEBUG, 'retire', (self, sql, nodeid)
self.db.cursor.execute(sql, (1, nodeid))
+ if self.do_journal:
+ self.db.addjournal(self.classname, nodeid, 'retired', None)
+
+ self.fireReactors('retire', nodeid, None)
+
+ def restore(self, nodeid):
+ '''Restore a retired node.
+
+ Make node available for all operations like it was before retirement.
+ '''
+ if self.db.journaltag is None:
+ raise DatabaseError, 'Database open read-only'
+
+ node = self.db.getnode(self.classname, nodeid)
+        # check if key property was overridden
+ key = self.getkey()
+ try:
+ id = self.lookup(node[key])
+ except KeyError:
+ pass
+ else:
+            raise KeyError, "Key property (%s) of retired node clashes "\
+                "with existing one (%s)" % (key, node[key])
+ self.fireAuditors('restore', nodeid, None)
+ # use the arg for __retired__ to cope with any odd database type
+ # conversion (hello, sqlite)
+ sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
+ self.db.arg, self.db.arg)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'restore', (self, sql, nodeid)
+ self.db.cursor.execute(sql, (0, nodeid))
+ if self.do_journal:
+ self.db.addjournal(self.classname, nodeid, 'restored', None)
+
+ self.fireReactors('restore', nodeid, None)
+
def is_retired(self, nodeid):
'''Return true if the node is rerired
'''
The returned list contains tuples of the form
- (date, tag, action, params)
+ (nodeid, date, tag, action, params)
'date' is a Timestamp object specifying the time of the change and
'tag' is the journaltag specified when the database was opened.
None, or a TypeError is raised. The values of the key property on
all existing nodes must be unique or a ValueError is raised.
'''
- # XXX create an index on the key prop column
+ # XXX create an index on the key prop column. We should also
+ # record that we've created this index in the schema somewhere.
prop = self.getprops()[propname]
if not isinstance(prop, String):
raise TypeError, 'key properties must be String'
if type(values) is type(''):
allvalues += (values,)
where.append('_%s = %s'%(prop, a))
+ elif values is None:
+ where.append('_%s is NULL'%prop)
else:
allvalues += tuple(values.keys())
where.append('_%s in (%s)'%(prop, ','.join([a]*len(values))))
args.append(requirements[propname].lower())
# generate the where clause
- s = ' and '.join(['_%s=%s'%(col, self.db.arg) for col in where])
+ s = ' and '.join(['lower(_%s)=%s'%(col, self.db.arg) for col in where])
sql = 'select id from _%s where %s'%(self.classname, s)
self.db.sql(sql, tuple(args))
l = [x[0] for x in self.db.sql_fetchall()]
def list(self):
''' Return a list of the ids of the active nodes in this class.
'''
- return self.db.getnodeids(self.classname, retired=0)
+ return self.getnodeids(retired=0)
+
+ def getnodeids(self, retired=None):
+ ''' Retrieve all the ids of the nodes for a particular Class.
+
+ Set retired=None to get all nodes. Otherwise it'll get all the
+ retired or non-retired nodes, depending on the flag.
+ '''
+ # flip the sense of the 'retired' flag if we don't want all of them
+ if retired is not None:
+ if retired:
+ args = (0, )
+ else:
+ args = (1, )
+ sql = 'select id from _%s where __retired__ <> %s'%(self.classname,
+ self.db.arg)
+ else:
+ args = ()
+ sql = 'select id from _%s'%self.classname
+ if __debug__:
+ print >>hyperdb.DEBUG, 'getnodeids', (self, sql, retired)
+ self.db.cursor.execute(sql, args)
+ ids = [x[0] for x in self.db.cursor.fetchall()]
+ return ids
def filter(self, search_matches, filterspec, sort=(None,None),
group=(None,None)):
cn = self.classname
+ timezone = self.db.getUserTimezone()
+
# figure the WHERE clause from the filterspec
props = self.getprops()
frum = ['_'+cn]
# now do other where clause stuff
if isinstance(propclass, Multilink):
tn = '%s_%s'%(cn, k)
- frum.append(tn)
- if isinstance(v, type([])):
+ if v in ('-1', ['-1']):
+ # only match rows that have count(linkid)=0 in the
+ # corresponding multilink table)
+ where.append('id not in (select nodeid from %s)'%tn)
+ elif isinstance(v, type([])):
+ frum.append(tn)
s = ','.join([a for x in v])
where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s))
args = args + v
else:
- where.append('id=%s.nodeid and %s.linkid = %s'%(tn, tn, a))
+ frum.append(tn)
+ where.append('id=%s.nodeid and %s.linkid=%s'%(tn, tn, a))
+ args.append(v)
+ elif k == 'id':
+ if isinstance(v, type([])):
+ s = ','.join([a for x in v])
+ where.append('%s in (%s)'%(k, s))
+ args = args + v
+ else:
+ where.append('%s=%s'%(k, a))
args.append(v)
elif isinstance(propclass, String):
if not isinstance(v, type([])):
elif isinstance(propclass, Link):
if isinstance(v, type([])):
if '-1' in v:
+ v = v[:]
v.remove('-1')
xtra = ' or _%s is NULL'%k
else:
xtra = ''
- s = ','.join([a for x in v])
- where.append('(_%s in (%s)%s)'%(k, s, xtra))
- args = args + v
+ if v:
+ s = ','.join([a for x in v])
+ where.append('(_%s in (%s)%s)'%(k, s, xtra))
+ args = args + v
+ else:
+ where.append('_%s is NULL'%k)
else:
if v == '-1':
v = None
else:
where.append('_%s=%s'%(k, a))
args.append(v)
+ elif isinstance(propclass, Date):
+ if isinstance(v, type([])):
+ s = ','.join([a for x in v])
+ where.append('_%s in (%s)'%(k, s))
+ args = args + [date.Date(x).serialise() for x in v]
+ else:
+ try:
+ # Try to filter on range of dates
+ date_rng = Range(v, date.Date, offset=timezone)
+ if (date_rng.from_value):
+ where.append('_%s >= %s'%(k, a))
+ args.append(date_rng.from_value.serialise())
+ if (date_rng.to_value):
+ where.append('_%s <= %s'%(k, a))
+ args.append(date_rng.to_value.serialise())
+ except ValueError:
+ # If range creation fails - ignore that search parameter
+ pass
+ elif isinstance(propclass, Interval):
+ if isinstance(v, type([])):
+ s = ','.join([a for x in v])
+ where.append('_%s in (%s)'%(k, s))
+ args = args + [date.Interval(x).serialise() for x in v]
+ else:
+ try:
+ # Try to filter on range of intervals
+ date_rng = Range(v, date.Interval)
+ if (date_rng.from_value):
+ where.append('_%s >= %s'%(k, a))
+ args.append(date_rng.from_value.serialise())
+ if (date_rng.to_value):
+ where.append('_%s <= %s'%(k, a))
+ args.append(date_rng.to_value.serialise())
+ except ValueError:
+ # If range creation fails - ignore that search parameter
+ pass
+ #where.append('_%s=%s'%(k, a))
+ #args.append(date.Interval(v).serialise())
else:
if isinstance(v, type([])):
s = ','.join([a for x in v])
where.append('_%s=%s'%(k, a))
args.append(v)
+ # don't match retired nodes
+ where.append('__retired__ <> 1')
+
# add results of full text search
if search_matches is not None:
v = search_matches.keys()
for react in self.reactors[action]:
react(self.db, self, nodeid, oldvalues)
-class FileClass(Class):
+class FileClass(Class, hyperdb.FileClass):
'''This class defines a large chunk of data. To support this, it has a
mandatory String property "content" which is typically saved off
externally to the hyperdb.
def create(self, **propvalues):
''' snaffle the file propvalue and store in a file
'''
+ # we need to fire the auditors now, or the content property won't
+ # be in propvalues for the auditors to play with
+ self.fireAuditors('create', None, propvalues)
+
+ # now remove the content property so it's not stored in the db
content = propvalues['content']
del propvalues['content']
- newid = Class.create(self, **propvalues)
+
+ # do the database create
+ newid = Class.create_inner(self, **propvalues)
+
+ # fire reactors
+ self.fireReactors('create', newid, None)
+
+ # store off the content as a file
self.db.storefile(self.classname, newid, None, content)
return newid
_marker = []
def get(self, nodeid, propname, default=_marker, cache=1):
- ''' trap the content propname and get it from the file
- '''
+ ''' Trap the content propname and get it from the file
+ 'cache' exists for backwards compatibility, and is not used.
+ '''
poss_msg = 'Possibly a access right configuration problem.'
if propname == 'content':
try:
return 'ERROR reading file: %s%s\n%s\n%s'%(
self.classname, nodeid, poss_msg, strerror)
if default is not self._marker:
- return Class.get(self, nodeid, propname, default, cache=cache)
+ return Class.get(self, nodeid, propname, default)
else:
- return Class.get(self, nodeid, propname, cache=cache)
+ return Class.get(self, nodeid, propname)
def getprops(self, protected=1):
''' In addition to the actual properties on the node, these methods