1 #
2 # Copyright (c) 2001 Bizar Software Pty Ltd (http://www.bizarsoftware.com.au/)
3 # This module is free software, and you may redistribute it and/or modify
4 # under the same terms as Python, so long as this copyright message and
5 # disclaimer are retained in their original form.
6 #
7 # IN NO EVENT SHALL BIZAR SOFTWARE PTY LTD BE LIABLE TO ANY PARTY FOR
8 # DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING
9 # OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE
10 # POSSIBILITY OF SUCH DAMAGE.
11 #
12 # BIZAR SOFTWARE PTY LTD SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
13 # BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
14 # FOR A PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS"
15 # BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
16 # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
17 #
18 """ Relational database (SQL) backend common code.
20 Basics:
22 - map roundup classes to relational tables
23 - automatically detect schema changes and modify the table schemas
24 appropriately (we store the "database version" of the schema in the
25 database itself as the only row of the "schema" table)
26 - multilinks (which represent a many-to-many relationship) are handled through
27 intermediate tables
28 - journals are stored adjunct to the per-class tables
29 - table names and columns have "_" prepended so the names can't clash with
30 restricted names (like "order")
31 - retirement is determined by the __retired__ column being > 0
33 Database-specific changes may generally be pushed out to the overridable
34 sql_* methods, since everything else should be fairly generic. There's
35 probably a bit of work to be done if a database is used that actually
36 honors column typing, since the initial databases don't (sqlite stores
37 everything as a string.)
39 The schema of the hyperdb being mapped to the database is stored in the
40 database itself as a repr()'ed dictionary of information about each Class
41 that maps to a table. If that information differs from the hyperdb schema,
42 then we update it. We also store in the schema dict a version which
43 allows us to upgrade the database schema when necessary. See upgrade_db().
To force a uniqueness constraint on the key properties we put the item
id into the __retired__ column during retirement (so it's 0 for "active"
items) and place a uniqueness constraint on key + __retired__. This is
48 particularly important for the users class where multiple users may
49 try to have the same username, with potentially many retired users with
50 the same name.
51 """
52 __docformat__ = 'restructuredtext'
54 # standard python modules
55 import sys, os, time, re, errno, weakref, copy, logging, datetime
57 # roundup modules
58 from roundup import hyperdb, date, password, roundupdb, security, support
59 from roundup.hyperdb import String, Password, Date, Interval, Link, \
60 Multilink, DatabaseError, Boolean, Number, Node
61 from roundup.backends import locking
62 from roundup.support import reversed
63 from roundup.i18n import _
66 # support
67 from roundup.backends.blobfiles import FileStorage
68 try:
69 from roundup.backends.indexer_xapian import Indexer
70 except ImportError:
71 from roundup.backends.indexer_rdbms import Indexer
72 from roundup.backends.sessions_rdbms import Sessions, OneTimeKeys
73 from roundup.date import Range
75 from roundup.backends.back_anydbm import compile_expression
78 # dummy value meaning "argument not passed"
79 _marker = []
81 def _num_cvt(num):
82 num = str(num)
83 try:
84 return int(num)
85 except:
86 return float(num)
88 def _bool_cvt(value):
89 if value in ('TRUE', 'FALSE'):
90 return {'TRUE': 1, 'FALSE': 0}[value]
91 # assume it's a number returned from the db API
92 return int(value)
def date_to_hyperdb_value(d):
    """ convert date d to a roundup date """
    if isinstance(d, datetime.datetime):
        return date.Date(d)
    # string-ish representation: roundup's Date parser wants a '.'
    # between the date and time parts rather than a space
    return date.Date(str(d).replace(' ', '.'))
def connection_dict(config, dbnamestr=None):
    """ Used by Postgresql and MySQL to determine the keyword args for
        opening the database connection.

    Only settings that are actually configured (non-None) are included.
    """
    kwargs = {}
    if dbnamestr:
        kwargs[dbnamestr] = config.RDBMS_NAME
    settings = ('host', 'port', 'password', 'user', 'read_default_group',
        'read_default_file')
    for name in settings:
        value = config['RDBMS_' + name.upper()]
        if value is not None:
            kwargs[name] = value
    return kwargs
class IdListOptimizer:
    """ Collapse long ascending runs of ids for SQL "IN" clauses.

    To prevent flooding the SQL parser of the underlying db engine
    with "x IN (1, 2, 3, ..., <large number>)" we collapse each
    consecutive run into "x BETWEEN <low> AND <high>".
    """

    def __init__(self):
        self.ranges = []
        self.singles = []

    def append(self, nid):
        """ Invariant: nid are ordered ascending """
        ranges = self.ranges
        singles = self.singles
        if ranges and ranges[-1][1] == nid - 1:
            # extends the most recent run
            ranges[-1][1] = nid
        elif singles and singles[-1] == nid - 1:
            # two consecutive singles fuse into a new run
            ranges.append([singles.pop(), nid])
        else:
            singles.append(nid)

    def where(self, field, placeholder):
        """ Return (sql-clause, args) matching the collected ids.

        An empty collection yields a clause that matches nothing.
        """
        if not self.ranges and not self.singles:
            return "(1=0)", []

        clauses = []
        for _ in self.ranges:
            clauses.append('%s BETWEEN %s AND %s' % (
                field, placeholder, placeholder))
        if self.singles:
            clauses.append('%s in (%s)' % (
                field, ','.join([placeholder] * len(self.singles))))

        args = []
        for low_high in self.ranges:
            args.extend(low_high)
        args.extend(self.singles)
        return '(%s)' % ' OR '.join(clauses), args

    def __str__(self):
        return "ranges: %r / singles: %r" % (self.ranges, self.singles)
162 class Database(FileStorage, hyperdb.Database, roundupdb.Database):
163 """ Wrapper around an SQL database that presents a hyperdb interface.
165 - some functionality is specific to the actual SQL database, hence
166 the sql_* methods that are NotImplemented
167 - we keep a cache of the latest N row fetches (where N is configurable).
168 """
    def __init__(self, config, journaltag=None):
        """ Open the database and load the schema from it.

        'config'
            tracker configuration; supplies the database location and
            the RDBMS_* connection/cache settings.
        'journaltag'
            tag recorded against journal entries made through this
            connection; None for an anonymous connection.
        """
        FileStorage.__init__(self, config.UMASK)
        self.config, self.journaltag = config, journaltag
        self.dir = config.DATABASE
        self.classes = {}
        self.indexer = Indexer(self)
        self.security = security.Security(self)

        # additional transaction support for external files and the like
        self.transactions = []

        # keep a cache of the N most recently retrieved rows of any kind
        # (classname, nodeid) = row
        self.cache_size = config.RDBMS_CACHE_SIZE
        self.clearCache()
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}

        # database lock
        self.lockfile = None

        # open a connection to the database, creating the "conn" attribute
        self.open_connection()
195 def clearCache(self):
196 self.cache = {}
197 self.cache_lru = []
199 def getSessionManager(self):
200 return Sessions(self)
202 def getOTKManager(self):
203 return OneTimeKeys(self)
205 def open_connection(self):
206 """ Open a connection to the database, creating it if necessary.
208 Must call self.load_dbschema()
209 """
210 raise NotImplemented
212 def sql(self, sql, args=None, cursor=None):
213 """ Execute the sql with the optional args.
214 """
215 self.log_debug('SQL %r %r'%(sql, args))
216 if not cursor:
217 cursor = self.cursor
218 if args:
219 cursor.execute(sql, args)
220 else:
221 cursor.execute(sql)
223 def sql_fetchone(self):
224 """ Fetch a single row. If there's nothing to fetch, return None.
225 """
226 return self.cursor.fetchone()
228 def sql_fetchall(self):
229 """ Fetch all rows. If there's nothing to fetch, return [].
230 """
231 return self.cursor.fetchall()
233 def sql_fetchiter(self):
234 """ Fetch all row as a generator
235 """
236 while True:
237 row = self.cursor.fetchone()
238 if not row: break
239 yield row
241 def sql_stringquote(self, value):
242 """ Quote the string so it's safe to put in the 'sql quotes'
243 """
244 return re.sub("'", "''", str(value))
246 def init_dbschema(self):
247 self.database_schema = {
248 'version': self.current_db_version,
249 'tables': {}
250 }
    def load_dbschema(self):
        """ Load the schema definition that the database currently implements

        The schema lives as a single repr()'ed dict in the one-row
        "schema" table (see save_dbschema); an empty table means a
        fresh database.
        """
        self.cursor.execute('select schema from schema')
        schema = self.cursor.fetchone()
        if schema:
            # NOTE(review): eval of database content - acceptable only
            # because the schema table is written exclusively by
            # save_dbschema(); confirm nothing else writes this table
            self.database_schema = eval(schema[0])
        else:
            self.database_schema = {}
262 def save_dbschema(self):
263 """ Save the schema definition that the database currently implements
264 """
265 s = repr(self.database_schema)
266 self.sql('delete from schema')
267 self.sql('insert into schema values (%s)'%self.arg, (s,))
    def post_init(self):
        """ Called once the schema initialisation has finished.

        We should now confirm that the schema defined by our "classes"
        attribute actually matches the schema in the database.

        Creates/alters/drops tables as needed, runs upgrade_db() for
        internal structure changes, saves the stored schema dict if
        anything changed, reindexes if required, then commits.
        """
        # whether the stored schema dict needs re-saving
        save = 0

        # handle changes in the schema
        tables = self.database_schema['tables']
        for classname, spec in self.classes.iteritems():
            if classname in tables:
                dbspec = tables[classname]
                if self.update_class(spec, dbspec):
                    tables[classname] = spec.schema()
                    save = 1
            else:
                self.create_class(spec)
                tables[classname] = spec.schema()
                save = 1

        # drop tables of classes that no longer exist in the hyperdb
        # (iterate a copy because entries are deleted during the loop)
        for classname, spec in list(tables.items()):
            if classname not in self.classes:
                self.drop_class(classname, tables[classname])
                del tables[classname]
                save = 1

        # now upgrade the database for column type changes, new internal
        # tables, etc.
        save = save | self.upgrade_db()

        # update the database version of the schema
        if save:
            self.save_dbschema()

        # reindex the db if necessary
        if self.indexer.should_reindex():
            self.reindex()

        # commit
        self.sql_commit()
311 # update this number when we need to make changes to the SQL structure
    # of the backend database
313 current_db_version = 5
314 db_version_updated = False
    def upgrade_db(self):
        """ Update the SQL database to reflect changes in the backend code.

        Return boolean whether we need to save the schema.

        Raises DatabaseError if the stored database version is newer
        than this code's current_db_version.
        """
        version = self.database_schema.get('version', 1)
        if version > self.current_db_version:
            raise DatabaseError('attempting to run rev %d DATABASE with rev '
                '%d CODE!'%(version, self.current_db_version))
        if version == self.current_db_version:
            # nothing to do
            return 0

        # upgrade steps are cumulative: every step between the stored
        # version and current_db_version runs, in order
        if version < 2:
            self.log_info('upgrade to version 2')
            # change the schema structure
            self.database_schema = {'tables': self.database_schema}

            # version 1 didn't have the actor column (note that in
            # MySQL this will also transition the tables to typed columns)
            self.add_new_columns_v2()

            # version 1 doesn't have the OTK, session and indexing in the
            # database
            self.create_version_2_tables()

        if version < 3:
            self.log_info('upgrade to version 3')
            self.fix_version_2_tables()

        if version < 4:
            self.fix_version_3_tables()

        if version < 5:
            self.fix_version_4_tables()

        self.database_schema['version'] = self.current_db_version
        self.db_version_updated = True
        return 1
355 def fix_version_3_tables(self):
356 # drop the shorter VARCHAR OTK column and add a new TEXT one
357 for name in ('otk', 'session'):
358 self.sql('DELETE FROM %ss'%name)
359 self.sql('ALTER TABLE %ss DROP %s_value'%(name, name))
360 self.sql('ALTER TABLE %ss ADD %s_value TEXT'%(name, name))
362 def fix_version_2_tables(self):
363 # Default (used by sqlite): NOOP
364 pass
    def fix_version_4_tables(self):
        """ Upgrade step: store each retired item's own id in its
            __retired__ column (previously just a flag), then add the
            unique (key, __retired__) index that relies on that
            convention.
        """
        # note this is an explicit call now
        c = self.cursor
        for cn, klass in self.classes.iteritems():
            c.execute('select id from _%s where __retired__<>0'%(cn,))
            for (id,) in c.fetchall():
                c.execute('update _%s set __retired__=%s where id=%s'%(cn,
                    self.arg, self.arg), (id, id))

            if klass.key:
                self.add_class_key_required_unique_constraint(cn, klass.key)
    def _convert_journal_tables(self):
        """Get current journal table contents, drop the table and re-create"""
        c = self.cursor
        cols = ','.join('nodeid date tag action params'.split())
        for klass in self.classes.itervalues():
            # slurp and drop
            sql = 'select %s from %s__journal order by date'%(cols,
                klass.classname)
            c.execute(sql)
            contents = c.fetchall()
            self.drop_journal_table_indexes(klass.classname)
            c.execute('drop table %s__journal'%klass.classname)

            # re-create and re-populate
            self.create_journal_table(klass)
            a = self.arg
            sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(
                klass.classname, cols, a, a, a, a, a)
            for row in contents:
                # no data conversion needed
                self.cursor.execute(sql, row)
    def _convert_string_properties(self):
        """Get current Class tables that contain String properties, and
        convert the VARCHAR columns to TEXT

        Works by slurping each class table, dropping and re-creating it
        (create_class_table now emits TEXT columns), then re-inserting
        the saved rows.
        """
        c = self.cursor
        for klass in self.classes.itervalues():
            # slurp and drop
            cols, mls = self.determine_columns(list(klass.properties.iteritems()))
            scols = ','.join([i[0] for i in cols])
            sql = 'select id,%s from _%s'%(scols, klass.classname)
            c.execute(sql)
            contents = c.fetchall()
            self.drop_class_table_indexes(klass.classname, klass.getkey())
            c.execute('drop table _%s'%klass.classname)

            # re-create and re-populate
            self.create_class_table(klass, create_sequence=0)
            a = ','.join([self.arg for i in range(len(cols)+1)])
            sql = 'insert into _%s (id,%s) values (%s)'%(klass.classname,
                scols, a)
            for row in contents:
                l = []
                for entry in row:
                    # mysql will already be a string - psql needs "help"
                    if entry is not None and not isinstance(entry, type('')):
                        entry = str(entry)
                    l.append(entry)
                self.cursor.execute(sql, l)
428 def refresh_database(self):
429 self.post_init()
432 def reindex(self, classname=None, show_progress=False):
433 if classname:
434 classes = [self.getclass(classname)]
435 else:
436 classes = list(self.classes.itervalues())
437 for klass in classes:
438 if show_progress:
439 for nodeid in support.Progress('Reindex %s'%klass.classname,
440 klass.list()):
441 klass.index(nodeid)
442 else:
443 for nodeid in klass.list():
444 klass.index(nodeid)
445 self.indexer.save_index()
447 hyperdb_to_sql_datatypes = {
448 hyperdb.String : 'TEXT',
449 hyperdb.Date : 'TIMESTAMP',
450 hyperdb.Link : 'INTEGER',
451 hyperdb.Interval : 'VARCHAR(255)',
452 hyperdb.Password : 'VARCHAR(255)',
453 hyperdb.Boolean : 'BOOLEAN',
454 hyperdb.Number : 'REAL',
455 }
457 def hyperdb_to_sql_datatype(self, propclass):
459 datatype = self.hyperdb_to_sql_datatypes.get(propclass)
460 if datatype:
461 return datatype
463 for k, v in self.hyperdb_to_sql_datatypes.iteritems():
464 if issubclass(propclass, k):
465 return v
467 raise ValueError('%r is not a hyperdb property class' % propclass)
    def determine_columns(self, properties):
        """ Figure the column names and multilink properties from the spec

        "properties" is a list of (name, prop) where prop may be an
        instance of a hyperdb "type" _or_ a string repr of that type.

        Returns (cols, mls): cols is a sorted list of
        (column-name, sql-datatype) pairs including the internal
        actor/activity/creator/creation columns; mls is the list of
        multilink property names (stored in separate tables).
        """
        cols = [
            ('_actor', self.hyperdb_to_sql_datatype(hyperdb.Link)),
            ('_activity', self.hyperdb_to_sql_datatype(hyperdb.Date)),
            ('_creator', self.hyperdb_to_sql_datatype(hyperdb.Link)),
            ('_creation', self.hyperdb_to_sql_datatype(hyperdb.Date)),
        ]
        mls = []
        # add the multilinks separately
        for col, prop in properties:
            if isinstance(prop, Multilink):
                mls.append(col)
                continue

            # NOTE(review): despite the docstring, string prop reprs are
            # rejected here - confirm no caller still passes them
            if isinstance(prop, type('')):
                raise ValueError("string property spec!")
                #and prop.find('Multilink') != -1:
                #mls.append(col)

            datatype = self.hyperdb_to_sql_datatype(prop.__class__)
            cols.append(('_'+col, datatype))

            # Intervals stored as two columns
            if isinstance(prop, Interval):
                cols.append(('__'+col+'_int__', 'BIGINT'))

        cols.sort()
        return cols, mls
    def update_class(self, spec, old_spec, force=0):
        """ Determine the differences between the current spec and the
            database version of the spec, and update where necessary.

        'spec' is the hyperdb Class; 'old_spec' is the stored schema
        tuple (keyprop, [(propname, prop-repr), ...]) from the database.

        If 'force' is true, update the database anyway.

        Returns 1 if the database was altered (stored schema needs
        saving), 0 if there was nothing to do. Raises DatabaseError if
        config disallows ALTER operations.
        """
        new_spec = spec.schema()
        new_spec[1].sort()
        old_spec[1].sort()
        if not force and new_spec == old_spec:
            # no changes
            return 0

        if not self.config.RDBMS_ALLOW_ALTER:
            raise DatabaseError(_('ALTER operation disallowed: %r -> %r.'%(old_spec, new_spec)))

        logger = logging.getLogger('roundup.hyperdb')
        logger.info('update_class %s'%spec.classname)

        logger.debug('old_spec %r'%(old_spec,))
        logger.debug('new_spec %r'%(new_spec,))

        # detect key prop change for potential index change
        keyprop_changes = {}
        if new_spec[0] != old_spec[0]:
            if old_spec[0]:
                keyprop_changes['remove'] = old_spec[0]
            if new_spec[0]:
                keyprop_changes['add'] = new_spec[0]

        # detect multilinks that have been removed, and drop their table
        old_has = {}
        for name, prop in old_spec[1]:
            old_has[name] = 1
            if name in spec.properties:
                continue

            # prop here is the string repr stored in the db schema
            if prop.find('Multilink to') != -1:
                # first drop indexes.
                self.drop_multilink_table_indexes(spec.classname, name)

                # now the multilink table itself
                sql = 'drop table %s_%s'%(spec.classname, name)
            else:
                # if this is the key prop, drop the index first
                if old_spec[0] == prop:
                    self.drop_class_table_key_index(spec.classname, name)
                    del keyprop_changes['remove']

                # drop the column
                sql = 'alter table _%s drop column _%s'%(spec.classname, name)

            self.sql(sql)

        # if we didn't remove the key prop just then, but the key prop has
        # changed, we still need to remove the old index
        if 'remove' in keyprop_changes:
            self.drop_class_table_key_index(spec.classname,
                keyprop_changes['remove'])

        # add new columns
        for propname, prop in new_spec[1]:
            if propname in old_has:
                continue
            prop = spec.properties[propname]
            if isinstance(prop, Multilink):
                self.create_multilink_table(spec, propname)
            else:
                # add the column
                coltype = self.hyperdb_to_sql_datatype(prop.__class__)
                sql = 'alter table _%s add column _%s %s'%(
                    spec.classname, propname, coltype)
                self.sql(sql)

                # extra Interval column
                if isinstance(prop, Interval):
                    sql = 'alter table _%s add column __%s_int__ BIGINT'%(
                        spec.classname, propname)
                    self.sql(sql)

                # if the new column is a key prop, we need an index!
                if new_spec[0] == propname:
                    self.create_class_table_key_index(spec.classname, propname)
                    del keyprop_changes['add']

        # if we didn't add the key prop just then, but the key prop has
        # changed, we still need to add the new index
        if 'add' in keyprop_changes:
            self.create_class_table_key_index(spec.classname,
                keyprop_changes['add'])

        return 1
596 def determine_all_columns(self, spec):
597 """Figure out the columns from the spec and also add internal columns
599 """
600 cols, mls = self.determine_columns(list(spec.properties.iteritems()))
602 # add on our special columns
603 cols.append(('id', 'INTEGER PRIMARY KEY'))
604 cols.append(('__retired__', 'INTEGER DEFAULT 0'))
605 return cols, mls
607 def create_class_table(self, spec):
608 """Create the class table for the given Class "spec". Creates the
609 indexes too."""
610 cols, mls = self.determine_all_columns(spec)
612 # create the base table
613 scols = ','.join(['%s %s'%x for x in cols])
614 sql = 'create table _%s (%s)'%(spec.classname, scols)
615 self.sql(sql)
617 self.create_class_table_indexes(spec)
619 return cols, mls
621 def create_class_table_indexes(self, spec):
622 """ create the class table for the given spec
623 """
624 # create __retired__ index
625 index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%(
626 spec.classname, spec.classname)
627 self.sql(index_sql2)
629 # create index for key property
630 if spec.key:
631 index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(
632 spec.classname, spec.key,
633 spec.classname, spec.key)
634 self.sql(index_sql3)
636 # and the unique index for key / retired(id)
637 self.add_class_key_required_unique_constraint(spec.classname,
638 spec.key)
640 # TODO: create indexes on (selected?) Link property columns, as
641 # they're more likely to be used for lookup
    def add_class_key_required_unique_constraint(self, cn, key):
        """ Add the unique (__retired__, key) index for class 'cn'.

        Enforces key uniqueness among active items (__retired__ == 0)
        while still allowing many retired items with the same key value,
        because retired items carry their own id in __retired__.
        """
        sql = '''create unique index _%s_key_retired_idx
            on _%s(__retired__, _%s)'''%(cn, cn, key)
        self.sql(sql)
648 def drop_class_table_indexes(self, cn, key):
649 # drop the old table indexes first
650 l = ['_%s_id_idx'%cn, '_%s_retired_idx'%cn]
651 if key:
652 l.append('_%s_%s_idx'%(cn, key))
654 table_name = '_%s'%cn
655 for index_name in l:
656 if not self.sql_index_exists(table_name, index_name):
657 continue
658 index_sql = 'drop index '+index_name
659 self.sql(index_sql)
661 def create_class_table_key_index(self, cn, key):
662 """ create the class table for the given spec
663 """
664 sql = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key, cn, key)
665 self.sql(sql)
667 def drop_class_table_key_index(self, cn, key):
668 table_name = '_%s'%cn
669 index_name = '_%s_%s_idx'%(cn, key)
670 if self.sql_index_exists(table_name, index_name):
671 sql = 'drop index '+index_name
672 self.sql(sql)
674 # and now the retired unique index too
675 index_name = '_%s_key_retired_idx'%cn
676 if self.sql_index_exists(table_name, index_name):
677 sql = 'drop index '+index_name
678 self.sql(sql)
680 def create_journal_table(self, spec):
681 """ create the journal table for a class given the spec and
682 already-determined cols
683 """
684 # journal table
685 cols = ','.join(['%s varchar'%x
686 for x in 'nodeid date tag action params'.split()])
687 sql = """create table %s__journal (
688 nodeid integer, date %s, tag varchar(255),
689 action varchar(255), params text)""" % (spec.classname,
690 self.hyperdb_to_sql_datatype(hyperdb.Date))
691 self.sql(sql)
692 self.create_journal_table_indexes(spec)
694 def create_journal_table_indexes(self, spec):
695 # index on nodeid
696 sql = 'create index %s_journ_idx on %s__journal(nodeid)'%(
697 spec.classname, spec.classname)
698 self.sql(sql)
700 def drop_journal_table_indexes(self, classname):
701 index_name = '%s_journ_idx'%classname
702 if not self.sql_index_exists('%s__journal'%classname, index_name):
703 return
704 index_sql = 'drop index '+index_name
705 self.sql(index_sql)
707 def create_multilink_table(self, spec, ml):
708 """ Create a multilink table for the "ml" property of the class
709 given by the spec
710 """
711 # create the table
712 sql = 'create table %s_%s (linkid INTEGER, nodeid INTEGER)'%(
713 spec.classname, ml)
714 self.sql(sql)
715 self.create_multilink_table_indexes(spec, ml)
717 def create_multilink_table_indexes(self, spec, ml):
718 # create index on linkid
719 index_sql = 'create index %s_%s_l_idx on %s_%s(linkid)'%(
720 spec.classname, ml, spec.classname, ml)
721 self.sql(index_sql)
723 # create index on nodeid
724 index_sql = 'create index %s_%s_n_idx on %s_%s(nodeid)'%(
725 spec.classname, ml, spec.classname, ml)
726 self.sql(index_sql)
728 def drop_multilink_table_indexes(self, classname, ml):
729 l = [
730 '%s_%s_l_idx'%(classname, ml),
731 '%s_%s_n_idx'%(classname, ml)
732 ]
733 table_name = '%s_%s'%(classname, ml)
734 for index_name in l:
735 if not self.sql_index_exists(table_name, index_name):
736 continue
737 index_sql = 'drop index %s'%index_name
738 self.sql(index_sql)
740 def create_class(self, spec):
741 """ Create a database table according to the given spec.
742 """
744 if not self.config.RDBMS_ALLOW_CREATE:
745 raise DatabaseError(_('CREATE operation disallowed: "%s".'%spec.classname))
747 cols, mls = self.create_class_table(spec)
748 self.create_journal_table(spec)
750 # now create the multilink tables
751 for ml in mls:
752 self.create_multilink_table(spec, ml)
754 def drop_class(self, cn, spec):
755 """ Drop the given table from the database.
757 Drop the journal and multilink tables too.
758 """
760 if not self.config.RDBMS_ALLOW_DROP:
761 raise DatabaseError(_('DROP operation disallowed: "%s".'%cn))
763 properties = spec[1]
764 # figure the multilinks
765 mls = []
766 for propname, prop in properties:
767 if isinstance(prop, Multilink):
768 mls.append(propname)
770 # drop class table and indexes
771 self.drop_class_table_indexes(cn, spec[0])
773 self.drop_class_table(cn)
775 # drop journal table and indexes
776 self.drop_journal_table_indexes(cn)
777 sql = 'drop table %s__journal'%cn
778 self.sql(sql)
780 for ml in mls:
781 # drop multilink table and indexes
782 self.drop_multilink_table_indexes(cn, ml)
783 sql = 'drop table %s_%s'%(spec.classname, ml)
784 self.sql(sql)
786 def drop_class_table(self, cn):
787 sql = 'drop table _%s'%cn
788 self.sql(sql)
790 #
791 # Classes
792 #
793 def __getattr__(self, classname):
794 """ A convenient way of calling self.getclass(classname).
795 """
796 if classname in self.classes:
797 return self.classes[classname]
798 raise AttributeError(classname)
800 def addclass(self, cl):
801 """ Add a Class to the hyperdatabase.
802 """
803 cn = cl.classname
804 if cn in self.classes:
805 raise ValueError(cn)
806 self.classes[cn] = cl
808 # add default Edit and View permissions
809 self.security.addPermission(name="Create", klass=cn,
810 description="User is allowed to create "+cn)
811 self.security.addPermission(name="Edit", klass=cn,
812 description="User is allowed to edit "+cn)
813 self.security.addPermission(name="View", klass=cn,
814 description="User is allowed to access "+cn)
816 def getclasses(self):
817 """ Return a list of the names of all existing classes.
818 """
819 return sorted(self.classes)
821 def getclass(self, classname):
822 """Get the Class object representing a particular class.
824 If 'classname' is not a valid class name, a KeyError is raised.
825 """
826 try:
827 return self.classes[classname]
828 except KeyError:
829 raise KeyError('There is no class called "%s"'%classname)
831 def clear(self):
832 """Delete all database contents.
834 Note: I don't commit here, which is different behaviour to the
835 "nuke from orbit" behaviour in the dbs.
836 """
837 logging.getLogger('roundup.hyperdb').info('clear')
838 for cn in self.classes:
839 sql = 'delete from _%s'%cn
840 self.sql(sql)
842 #
843 # Nodes
844 #
846 hyperdb_to_sql_value = {
847 hyperdb.String : str,
848 # fractional seconds by default
849 hyperdb.Date : lambda x: x.formal(sep=' ', sec='%06.3f'),
850 hyperdb.Link : int,
851 hyperdb.Interval : str,
852 hyperdb.Password : str,
853 hyperdb.Boolean : lambda x: x and 'TRUE' or 'FALSE',
854 hyperdb.Number : lambda x: x,
855 hyperdb.Multilink : lambda x: x, # used in journal marshalling
856 }
858 def to_sql_value(self, propklass):
860 fn = self.hyperdb_to_sql_value.get(propklass)
861 if fn:
862 return fn
864 for k, v in self.hyperdb_to_sql_value.iteritems():
865 if issubclass(propklass, k):
866 return v
868 raise ValueError('%r is not a hyperdb property class' % propklass)
870 def _cache_del(self, key):
871 del self.cache[key]
872 self.cache_lru.remove(key)
874 def _cache_refresh(self, key):
875 self.cache_lru.remove(key)
876 self.cache_lru.insert(0, key)
878 def _cache_save(self, key, node):
879 self.cache[key] = node
880 # update the LRU
881 self.cache_lru.insert(0, key)
882 if len(self.cache_lru) > self.cache_size:
883 del self.cache[self.cache_lru.pop()]
    def addnode(self, classname, nodeid, node):
        """ Add the specified node to its class's db.

        'node' maps property names to values; missing properties are
        defaulted ([] for multilinks, None otherwise). When 'creator'
        is absent (i.e. not an import), the creation/activity dates and
        creator/actor are filled in from the current user and time.
        """
        self.log_debug('addnode %s%s %r'%(classname,
            nodeid, node))

        # determine the column definitions and multilink tables
        cl = self.classes[classname]
        cols, mls = self.determine_columns(list(cl.properties.iteritems()))

        # we'll be supplied these props if we're doing an import
        values = node.copy()
        if 'creator' not in values:
            # add in the "calculated" properties (dupe so we don't affect
            # calling code's node assumptions)
            values['creation'] = values['activity'] = date.Date()
            values['actor'] = values['creator'] = self.getuid()

        cl = self.classes[classname]
        props = cl.getprops(protected=1)
        del props['id']

        # default the non-multilink columns
        for col, prop in props.iteritems():
            if col not in values:
                if isinstance(prop, Multilink):
                    values[col] = []
                else:
                    values[col] = None

        # clear this node out of the cache if it's in there
        key = (classname, nodeid)
        if key in self.cache:
            self._cache_del(key)

        # figure the values to insert, in the same (sorted) order as
        # the column list from determine_columns
        vals = []
        for col,dt in cols:
            # this is somewhat dodgy....
            if col.endswith('_int__'):
                # XXX eugh, this test suxxors
                value = values[col[2:-6]]
                # this is an Interval special "int" column
                if value is not None:
                    vals.append(value.as_seconds())
                else:
                    vals.append(value)
                continue

            # strip the leading '_' to get the property name
            prop = props[col[1:]]
            value = values[col[1:]]
            if value is not None:
                value = self.to_sql_value(prop.__class__)(value)
            vals.append(value)
        vals.append(nodeid)
        vals = tuple(vals)

        # make sure the ordering is correct for column name -> column value
        s = ','.join([self.arg for x in cols]) + ',%s'%self.arg
        cols = ','.join([col for col,dt in cols]) + ',id'

        # perform the inserts
        sql = 'insert into _%s (%s) values (%s)'%(classname, cols, s)
        self.sql(sql, vals)

        # insert the multilink rows
        for col in mls:
            t = '%s_%s'%(classname, col)
            for entry in node[col]:
                sql = 'insert into %s (linkid, nodeid) values (%s,%s)'%(t,
                    self.arg, self.arg)
                self.sql(sql, (entry, nodeid))
    def setnode(self, classname, nodeid, values, multilink_changes={}):
        """ Change the specified node.

        'values' maps property names to new values; 'multilink_changes'
        maps multilink property names to (add, remove) id-list pairs.
        When 'multilink_changes' is empty (e.g. an import), multilink
        tables are instead rewritten wholesale from 'values'.

        NOTE(review): multilink_changes uses a mutable default argument;
        it is only read here, never mutated, so this is harmless - but
        None would be the safer convention.
        """
        self.log_debug('setnode %s%s %r'
            % (classname, nodeid, values))

        # clear this node out of the cache if it's in there
        key = (classname, nodeid)
        if key in self.cache:
            self._cache_del(key)

        cl = self.classes[classname]
        props = cl.getprops()

        cols = []
        mls = []
        # add the multilinks separately
        for col in values:
            prop = props[col]
            if isinstance(prop, Multilink):
                mls.append(col)
            elif isinstance(prop, Interval):
                # Intervals store the seconds value too
                cols.append(col)
                # extra leading '_' added by code below
                cols.append('_' +col + '_int__')
            else:
                cols.append(col)
        cols.sort()

        # figure the values to insert, matching the sorted column order
        vals = []
        for col in cols:
            if col.endswith('_int__'):
                # XXX eugh, this test suxxors
                # Intervals store the seconds value too
                col = col[1:-6]
                prop = props[col]
                value = values[col]
                if value is None:
                    vals.append(None)
                else:
                    vals.append(value.as_seconds())
            else:
                prop = props[col]
                value = values[col]
                if value is None:
                    e = None
                else:
                    e = self.to_sql_value(prop.__class__)(value)
                vals.append(e)

        vals.append(int(nodeid))
        vals = tuple(vals)

        # if there's any updates to regular columns, do them
        if cols:
            # make sure the ordering is correct for column name -> column value
            s = ','.join(['_%s=%s'%(x, self.arg) for x in cols])
            cols = ','.join(cols)

            # perform the update
            sql = 'update _%s set %s where id=%s'%(classname, s, self.arg)
            self.sql(sql, vals)

        # we're probably coming from an import, not a change
        if not multilink_changes:
            for name in mls:
                prop = props[name]
                value = values[name]

                t = '%s_%s'%(classname, name)

                # clear out previous values for this node
                # XXX numeric ids
                self.sql('delete from %s where nodeid=%s'%(t, self.arg),
                    (nodeid,))

                # insert the values for this node
                for entry in values[name]:
                    sql = 'insert into %s (linkid, nodeid) values (%s,%s)'%(t,
                        self.arg, self.arg)
                    # XXX numeric ids
                    self.sql(sql, (entry, nodeid))

        # we have multilink changes to apply
        for col, (add, remove) in multilink_changes.iteritems():
            tn = '%s_%s'%(classname, col)
            if add:
                sql = 'insert into %s (nodeid, linkid) values (%s,%s)'%(tn,
                    self.arg, self.arg)
                for addid in add:
                    # XXX numeric ids
                    self.sql(sql, (int(nodeid), int(addid)))
            if remove:
                s = ','.join([self.arg]*len(remove))
                sql = 'delete from %s where nodeid=%s and linkid in (%s)'%(tn,
                    self.arg, s)
                # XXX numeric ids
                self.sql(sql, [int(nodeid)] + remove)
    # Map hyperdb property classes to converters that turn a raw SQL
    # column value back into the hyperdb-level value.  Consulted by
    # to_hyperdb_value(), which also matches subclasses of these keys.
    sql_to_hyperdb_value = {
        hyperdb.String : str,
        hyperdb.Date : date_to_hyperdb_value,
        # hyperdb.Link : int,      # XXX numeric ids
        hyperdb.Link : str,
        hyperdb.Interval : date.Interval,
        # passwords are stored in their encrypted form
        hyperdb.Password : lambda x: password.Password(encrypted=x),
        hyperdb.Boolean : _bool_cvt,
        hyperdb.Number : _num_cvt,
        hyperdb.Multilink : lambda x: x,    # used in journal marshalling
    }
1071 def to_hyperdb_value(self, propklass):
1073 fn = self.sql_to_hyperdb_value.get(propklass)
1074 if fn:
1075 return fn
1077 for k, v in self.sql_to_hyperdb_value.iteritems():
1078 if issubclass(propklass, k):
1079 return v
1081 raise ValueError('%r is not a hyperdb property class' % propklass)
1083 def _materialize_multilink(self, classname, nodeid, node, propname):
1084 """ evaluation of single Multilink (lazy eval may have skipped this)
1085 """
1086 if propname not in node:
1087 sql = 'select linkid from %s_%s where nodeid=%s'%(classname,
1088 propname, self.arg)
1089 self.sql(sql, (nodeid,))
1090 # extract the first column from the result
1091 # XXX numeric ids
1092 items = [int(x[0]) for x in self.cursor.fetchall()]
1093 items.sort ()
1094 node[propname] = [str(x) for x in items]
1096 def _materialize_multilinks(self, classname, nodeid, node, props=None):
1097 """ get all Multilinks of a node (lazy eval may have skipped this)
1098 """
1099 cl = self.classes[classname]
1100 props = props or [pn for (pn, p) in cl.properties.iteritems()
1101 if isinstance(p, Multilink)]
1102 for propname in props:
1103 if propname not in node:
1104 self._materialize_multilink(classname, nodeid, node, propname)
    def getnode(self, classname, nodeid, fetch_multilinks=True):
        """ Get a node from the database.

            Returns the node as a dict mapping property name to value.
            For optimisation, Multilink properties may be skipped
            (lazy Multilinks) by passing fetch_multilinks=False; internal
            database operations need them and use the default.

            Raises IndexError if no such node exists.
        """
        # see if we have this node cached
        key = (classname, nodeid)
        if key in self.cache:
            # push us back to the top of the LRU
            self._cache_refresh(key)
            if __debug__:
                self.stats['cache_hits'] += 1
            # return the cached information; the cached dict may be
            # missing lazily-skipped multilinks, so fill them in first
            if fetch_multilinks:
                self._materialize_multilinks(classname, nodeid, self.cache[key])
            return self.cache[key]

        if __debug__:
            self.stats['cache_misses'] += 1
            start_t = time.time()

        # figure the columns we're fetching
        cl = self.classes[classname]
        cols, mls = self.determine_columns(list(cl.properties.iteritems()))
        scols = ','.join([col for col,dt in cols])

        # perform the basic property fetch
        sql = 'select %s from _%s where id=%s'%(scols, classname, self.arg)
        self.sql(sql, (nodeid,))

        values = self.sql_fetchone()
        if values is None:
            raise IndexError('no such %s node %s'%(classname, nodeid))

        # make up the node dict, converting each SQL value back to its
        # hyperdb form
        node = {}
        props = cl.getprops(protected=1)
        for col in range(len(cols)):
            # strip the leading '_' from the column name
            name = cols[col][0][1:]
            if name.endswith('_int__'):
                # XXX eugh, this test suxxors
                # ignore the special Interval-as-seconds column
                continue
            value = values[col]
            if value is not None:
                value = self.to_hyperdb_value(props[name].__class__)(value)
            node[name] = value

        if fetch_multilinks and mls:
            self._materialize_multilinks(classname, nodeid, node, mls)

        # save off in the cache
        key = (classname, nodeid)
        self._cache_save(key, node)

        if __debug__:
            self.stats['get_items'] += (time.time() - start_t)

        return node
    def destroynode(self, classname, nodeid):
        """Remove a node from the database. Called exclusively by the
        destroy() method on Class.

        Removes the node row, its multilink rows and its journal, and
        schedules removal of any associated blob file storage for the
        next commit.  Raises IndexError if the node doesn't exist.
        """
        logging.getLogger('roundup.hyperdb').info('destroynode %s%s'%(
            classname, nodeid))

        # make sure the node exists
        if not self.hasnode(classname, nodeid):
            raise IndexError('%s has no node %s'%(classname, nodeid))

        # see if we have this node cached
        if (classname, nodeid) in self.cache:
            del self.cache[(classname, nodeid)]

        # see if there's any obvious commit actions that we should get rid of
        # (iterate a copy since we remove entries as we go)
        for entry in self.transactions[:]:
            if entry[1][:2] == (classname, nodeid):
                self.transactions.remove(entry)

        # now do the SQL
        sql = 'delete from _%s where id=%s'%(classname, self.arg)
        self.sql(sql, (nodeid,))

        # remove from multilinks
        cl = self.getclass(classname)
        x, mls = self.determine_columns(list(cl.properties.iteritems()))
        for col in mls:
            # remove this node's rows from the multilink table
            sql = 'delete from %s_%s where nodeid=%s'%(classname, col, self.arg)
            self.sql(sql, (nodeid,))

        # remove journal entries
        sql = 'delete from %s__journal where nodeid=%s'%(classname, self.arg)
        self.sql(sql, (nodeid,))

        # cleanup any blob filestorage when we commit
        self.transactions.append((FileStorage.destroy, (self, classname, nodeid)))
1206 def hasnode(self, classname, nodeid):
1207 """ Determine if the database has a given node.
1208 """
1209 # If this node is in the cache, then we do not need to go to
1210 # the database. (We don't consider this an LRU hit, though.)
1211 if (classname, nodeid) in self.cache:
1212 # Return 1, not True, to match the type of the result of
1213 # the SQL operation below.
1214 return 1
1215 sql = 'select count(*) from _%s where id=%s'%(classname, self.arg)
1216 self.sql(sql, (nodeid,))
1217 return int(self.cursor.fetchone()[0])
1219 def countnodes(self, classname):
1220 """ Count the number of nodes that exist for a particular Class.
1221 """
1222 sql = 'select count(*) from _%s'%classname
1223 self.sql(sql)
1224 return self.cursor.fetchone()[0]
1226 def addjournal(self, classname, nodeid, action, params, creator=None,
1227 creation=None):
1228 """ Journal the Action
1229 'action' may be:
1231 'create' or 'set' -- 'params' is a dictionary of property values
1232 'link' or 'unlink' -- 'params' is (classname, nodeid, propname)
1233 'retire' -- 'params' is None
1234 """
1235 # handle supply of the special journalling parameters (usually
1236 # supplied on importing an existing database)
1237 if creator:
1238 journaltag = creator
1239 else:
1240 journaltag = self.getuid()
1241 if creation:
1242 journaldate = creation
1243 else:
1244 journaldate = date.Date()
1246 # create the journal entry
1247 cols = 'nodeid,date,tag,action,params'
1249 self.log_debug('addjournal %s%s %r %s %s %r'%(classname,
1250 nodeid, journaldate, journaltag, action, params))
1252 # make the journalled data marshallable
1253 if isinstance(params, type({})):
1254 self._journal_marshal(params, classname)
1256 params = repr(params)
1258 dc = self.to_sql_value(hyperdb.Date)
1259 journaldate = dc(journaldate)
1261 self.save_journal(classname, cols, nodeid, journaldate,
1262 journaltag, action, params)
1264 def setjournal(self, classname, nodeid, journal):
1265 """Set the journal to the "journal" list."""
1266 # clear out any existing entries
1267 self.sql('delete from %s__journal where nodeid=%s'%(classname,
1268 self.arg), (nodeid,))
1270 # create the journal entry
1271 cols = 'nodeid,date,tag,action,params'
1273 dc = self.to_sql_value(hyperdb.Date)
1274 for nodeid, journaldate, journaltag, action, params in journal:
1275 self.log_debug('addjournal %s%s %r %s %s %r'%(
1276 classname, nodeid, journaldate, journaltag, action,
1277 params))
1279 # make the journalled data marshallable
1280 if isinstance(params, type({})):
1281 self._journal_marshal(params, classname)
1282 params = repr(params)
1284 self.save_journal(classname, cols, nodeid, dc(journaldate),
1285 journaltag, action, params)
1287 def _journal_marshal(self, params, classname):
1288 """Convert the journal params values into safely repr'able and
1289 eval'able values."""
1290 properties = self.getclass(classname).getprops()
1291 for param, value in params.iteritems():
1292 if not value:
1293 continue
1294 property = properties[param]
1295 cvt = self.to_sql_value(property.__class__)
1296 if isinstance(property, Password):
1297 params[param] = cvt(value)
1298 elif isinstance(property, Date):
1299 params[param] = cvt(value)
1300 elif isinstance(property, Interval):
1301 params[param] = cvt(value)
1302 elif isinstance(property, Boolean):
1303 params[param] = cvt(value)
    def getjournal(self, classname, nodeid):
        """ get the journal for id

        Returns a list of (nodeid, date, tag, action, params) tuples
        with the params values unmarshalled back into hyperdb values.
        Raises IndexError if the node doesn't exist.
        """
        # make sure the node exists
        if not self.hasnode(classname, nodeid):
            raise IndexError('%s has no node %s'%(classname, nodeid))

        cols = ','.join('nodeid date tag action params'.split())
        journal = self.load_journal(classname, cols, nodeid)

        # now unmarshal the data
        # NOTE: params were stored via repr() and are reconstructed with
        # eval() -- safe only because the journal table is written solely
        # by this backend, never from untrusted input
        dc = self.to_hyperdb_value(hyperdb.Date)
        res = []
        properties = self.getclass(classname).getprops()
        for nodeid, date_stamp, user, action, params in journal:
            params = eval(params)
            if isinstance(params, type({})):
                for param, value in params.iteritems():
                    if not value:
                        continue
                    property = properties.get(param, None)
                    if property is None:
                        # deleted property
                        continue
                    cvt = self.to_hyperdb_value(property.__class__)
                    if isinstance(property, Password):
                        params[param] = password.JournalPassword(value)
                    elif isinstance(property, Date):
                        params[param] = cvt(value)
                    elif isinstance(property, Interval):
                        params[param] = cvt(value)
                    elif isinstance(property, Boolean):
                        params[param] = cvt(value)
            # XXX numeric ids
            res.append((str(nodeid), dc(date_stamp), user, action, params))
        return res
1342 def save_journal(self, classname, cols, nodeid, journaldate,
1343 journaltag, action, params):
1344 """ Save the journal entry to the database
1345 """
1346 entry = (nodeid, journaldate, journaltag, action, params)
1348 # do the insert
1349 a = self.arg
1350 sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(
1351 classname, cols, a, a, a, a, a)
1352 self.sql(sql, entry)
1354 def load_journal(self, classname, cols, nodeid):
1355 """ Load the journal from the database
1356 """
1357 # now get the journal entries
1358 sql = 'select %s from %s__journal where nodeid=%s order by date'%(
1359 cols, classname, self.arg)
1360 self.sql(sql, (nodeid,))
1361 return self.cursor.fetchall()
1363 def pack(self, pack_before):
1364 """ Delete all journal entries except "create" before 'pack_before'.
1365 """
1366 date_stamp = self.to_sql_value(Date)(pack_before)
1368 # do the delete
1369 for classname in self.classes:
1370 sql = "delete from %s__journal where date<%s and "\
1371 "action<>'create'"%(classname, self.arg)
1372 self.sql(sql, (date_stamp,))
1374 def sql_commit(self, fail_ok=False):
1375 """ Actually commit to the database.
1376 """
1377 logging.getLogger('roundup.hyperdb').info('commit')
1379 self.conn.commit()
1381 # open a new cursor for subsequent work
1382 self.cursor = self.conn.cursor()
1384 def commit(self, fail_ok=False):
1385 """ Commit the current transactions.
1387 Save all data changed since the database was opened or since the
1388 last commit() or rollback().
1390 fail_ok indicates that the commit is allowed to fail. This is used
1391 in the web interface when committing cleaning of the session
1392 database. We don't care if there's a concurrency issue there.
1394 The only backend this seems to affect is postgres.
1395 """
1396 # commit the database
1397 self.sql_commit(fail_ok)
1399 # now, do all the other transaction stuff
1400 for method, args in self.transactions:
1401 method(*args)
1403 # save the indexer
1404 self.indexer.save_index()
1406 # clear out the transactions
1407 self.transactions = []
1409 # clear the cache: Don't carry over cached values from one
1410 # transaction to the next (there may be other changes from other
1411 # transactions)
1412 self.clearCache()
1414 def sql_rollback(self):
1415 self.conn.rollback()
1417 def rollback(self):
1418 """ Reverse all actions from the current transaction.
1420 Undo all the changes made since the database was opened or the last
1421 commit() or rollback() was performed.
1422 """
1423 logging.getLogger('roundup.hyperdb').info('rollback')
1425 self.sql_rollback()
1427 # roll back "other" transaction stuff
1428 for method, args in self.transactions:
1429 # delete temporary files
1430 if method == self.doStoreFile:
1431 self.rollbackStoreFile(*args)
1432 self.transactions = []
1434 # clear the cache
1435 self.clearCache()
1437 def sql_close(self):
1438 logging.getLogger('roundup.hyperdb').info('close')
1439 self.conn.close()
1441 def close(self):
1442 """ Close off the connection.
1443 """
1444 self.indexer.close()
1445 self.sql_close()
1447 #
1448 # The base Class class
1449 #
1450 class Class(hyperdb.Class):
1451 """ The handle to a particular class of nodes in a hyperdatabase.
1453 All methods except __repr__ and getnode must be implemented by a
1454 concrete backend Class.
1455 """
1457 def schema(self):
1458 """ A dumpable version of the schema that we can store in the
1459 database
1460 """
1461 return (self.key, [(x, repr(y)) for x,y in self.properties.iteritems()])
1463 def enableJournalling(self):
1464 """Turn journalling on for this class
1465 """
1466 self.do_journal = 1
1468 def disableJournalling(self):
1469 """Turn journalling off for this class
1470 """
1471 self.do_journal = 0
1473 # Editing nodes:
1474 def create(self, **propvalues):
1475 """ Create a new node of this class and return its id.
1477 The keyword arguments in 'propvalues' map property names to values.
1479 The values of arguments must be acceptable for the types of their
1480 corresponding properties or a TypeError is raised.
1482 If this class has a key property, it must be present and its value
1483 must not collide with other key strings or a ValueError is raised.
1485 Any other properties on this class that are missing from the
1486 'propvalues' dictionary are set to None.
1488 If an id in a link or multilink property does not refer to a valid
1489 node, an IndexError is raised.
1490 """
1491 self.fireAuditors('create', None, propvalues)
1492 newid = self.create_inner(**propvalues)
1493 self.fireReactors('create', newid, None)
1494 return newid
    def create_inner(self, **propvalues):
        """ Called by create, in-between the audit and react calls.

        Validates and normalises 'propvalues' (resolving key strings to
        ids for Link/Multilink values, journalling the links), fills in
        missing properties, stores the node and journals its creation.
        Returns the new node's id as a string.
        """
        if 'id' in propvalues:
            raise KeyError('"id" is reserved')

        if self.db.journaltag is None:
            raise DatabaseError(_('Database open read-only'))

        if ('creator' in propvalues or 'actor' in propvalues or
             'creation' in propvalues or 'activity' in propvalues):
            raise KeyError('"creator", "actor", "creation" and '
                '"activity" are reserved')

        # new node's id
        newid = self.db.newid(self.classname)

        # validate propvalues
        num_re = re.compile('^\d+$')
        for key, value in propvalues.iteritems():
            if key == self.key:
                # the key property's value must be unique
                try:
                    self.lookup(value)
                except KeyError:
                    pass
                else:
                    raise ValueError('node with key "%s" exists'%value)

            # try to handle this property
            try:
                prop = self.properties[key]
            except KeyError:
                raise KeyError('"%s" has no property "%s"'%(self.classname,
                    key))

            if value is not None and isinstance(prop, Link):
                if type(value) != type(''):
                    raise ValueError('link value must be String')
                link_class = self.properties[key].classname
                # if it isn't a number, it's a key
                if not num_re.match(value):
                    try:
                        value = self.db.classes[link_class].lookup(value)
                    except (TypeError, KeyError):
                        raise IndexError('new property "%s": %s not a %s'%(
                            key, value, link_class))
                elif not self.db.getclass(link_class).hasnode(value):
                    raise IndexError('%s has no node %s'%(link_class,
                        value))

                # save off the value
                propvalues[key] = value

                # register the link with the newly linked node
                if self.do_journal and self.properties[key].do_journal:
                    self.db.addjournal(link_class, value, 'link',
                        (self.classname, newid, key))

            elif isinstance(prop, Multilink):
                if value is None:
                    value = []
                if not hasattr(value, '__iter__'):
                    raise TypeError('new property "%s" not an iterable of ids'%key)
                # clean up and validate the list of links
                link_class = self.properties[key].classname
                l = []
                for entry in value:
                    if type(entry) != type(''):
                        raise ValueError('"%s" multilink value (%r) '
                            'must contain Strings'%(key, value))
                    # if it isn't a number, it's a key
                    if not num_re.match(entry):
                        try:
                            entry = self.db.classes[link_class].lookup(entry)
                        except (TypeError, KeyError):
                            raise IndexError('new property "%s": %s not a %s'%(
                                key, entry, self.properties[key].classname))
                    l.append(entry)
                value = l
                propvalues[key] = value

                # handle additions: each linked node must exist and each
                # link gets journalled on the linked node
                for nodeid in value:
                    if not self.db.getclass(link_class).hasnode(nodeid):
                        raise IndexError('%s has no node %s'%(link_class,
                            nodeid))
                    # register the link with the newly linked node
                    if self.do_journal and self.properties[key].do_journal:
                        self.db.addjournal(link_class, nodeid, 'link',
                            (self.classname, newid, key))

            elif isinstance(prop, String):
                if type(value) != type('') and type(value) != type(u''):
                    raise TypeError('new property "%s" not a string'%key)
                if prop.indexme:
                    # feed the value to the full-text indexer
                    self.db.indexer.add_text((self.classname, newid, key),
                        value)

            elif isinstance(prop, Password):
                if not isinstance(value, password.Password):
                    raise TypeError('new property "%s" not a Password'%key)

            elif isinstance(prop, Date):
                if value is not None and not isinstance(value, date.Date):
                    raise TypeError('new property "%s" not a Date'%key)

            elif isinstance(prop, Interval):
                if value is not None and not isinstance(value, date.Interval):
                    raise TypeError('new property "%s" not an Interval'%key)

            elif value is not None and isinstance(prop, Number):
                try:
                    float(value)
                except ValueError:
                    raise TypeError('new property "%s" not numeric'%key)

            elif value is not None and isinstance(prop, Boolean):
                try:
                    int(value)
                except ValueError:
                    raise TypeError('new property "%s" not boolean'%key)

        # make sure there's data where there needs to be
        for key, prop in self.properties.iteritems():
            if key in propvalues:
                continue
            if key == self.key:
                raise ValueError('key property "%s" is required'%key)
            if isinstance(prop, Multilink):
                propvalues[key] = []
            else:
                propvalues[key] = None

        # done
        self.db.addnode(self.classname, newid, propvalues)
        if self.do_journal:
            self.db.addjournal(self.classname, newid, ''"create", {})

        # XXX numeric ids
        return str(newid)
    def get(self, nodeid, propname, default=_marker, cache=1):
        """Get the value of a property on an existing node of this class.

        'nodeid' must be the id of an existing node of this class or an
        IndexError is raised. 'propname' must be the name of a property
        of this class or a KeyError is raised.

        'default' is returned when the property has no value stored
        (unless it is left at the _marker sentinel, in which case None
        or [] is returned depending on the property type).

        'cache' exists for backwards compatibility, and is not used.
        """
        if propname == 'id':
            return nodeid

        # get the node's dict (multilinks are fetched lazily below)
        d = self.db.getnode(self.classname, nodeid, fetch_multilinks=False)
        # handle common case -- that property is in dict -- first
        # if None and one of creator/creation actor/activity return None
        if propname in d:
            r = d [propname]
            # return copy of our list so callers can't mutate the cache
            if isinstance (r, list):
                return r[:]
            if r is not None:
                return r
            elif propname in ('creation', 'activity', 'creator', 'actor'):
                return r

        # propname not in d: synthesise the special protected properties
        if propname == 'creation' or propname == 'activity':
            return date.Date()
        if propname == 'creator' or propname == 'actor':
            return self.db.getuid()

        # get the property (raises KeyError if invalid)
        prop = self.properties[propname]

        # lazy evaluation of Multilink
        if propname not in d and isinstance(prop, Multilink):
            self.db._materialize_multilink(self.classname, nodeid, d, propname)

        # handle there being no value in the table for the property
        if propname not in d or d[propname] is None:
            if default is _marker:
                if isinstance(prop, Multilink):
                    return []
                else:
                    return None
            else:
                return default

        # don't pass our list to other code
        if isinstance(prop, Multilink):
            return d[propname][:]

        return d[propname]
1692 def set(self, nodeid, **propvalues):
1693 """Modify a property on an existing node of this class.
1695 'nodeid' must be the id of an existing node of this class or an
1696 IndexError is raised.
1698 Each key in 'propvalues' must be the name of a property of this
1699 class or a KeyError is raised.
1701 All values in 'propvalues' must be acceptable types for their
1702 corresponding properties or a TypeError is raised.
1704 If the value of the key property is set, it must not collide with
1705 other key strings or a ValueError is raised.
1707 If the value of a Link or Multilink property contains an invalid
1708 node id, a ValueError is raised.
1709 """
1710 self.fireAuditors('set', nodeid, propvalues)
1711 oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid))
1712 propvalues = self.set_inner(nodeid, **propvalues)
1713 self.fireReactors('set', nodeid, oldvalues)
1714 return propvalues
    def set_inner(self, nodeid, **propvalues):
        """ Called by set, in-between the audit and react calls.

        Validates and normalises 'propvalues', drops no-op changes,
        journals link/unlink events, stores the changes and journals the
        set.  Returns the dict of property values actually changed.
        """
        if not propvalues:
            return propvalues

        if ('creator' in propvalues or 'actor' in propvalues or
             'creation' in propvalues or 'activity' in propvalues):
            raise KeyError('"creator", "actor", "creation" and '
                '"activity" are reserved')

        if 'id' in propvalues:
            raise KeyError('"id" is reserved')

        if self.db.journaltag is None:
            raise DatabaseError(_('Database open read-only'))

        node = self.db.getnode(self.classname, nodeid)
        if self.is_retired(nodeid):
            raise IndexError('Requested item is retired')
        num_re = re.compile('^\d+$')

        # make a copy of the values dictionary - we'll modify the contents
        propvalues = propvalues.copy()

        # if the journal value is to be different, store it in here
        journalvalues = {}

        # remember the add/remove stuff for multilinks, making it easier
        # for the Database layer to do its stuff
        multilink_changes = {}

        # iterate over a snapshot since we delete no-op entries as we go
        for propname, value in list(propvalues.items()):
            # check to make sure we're not duplicating an existing key
            if propname == self.key and node[propname] != value:
                try:
                    self.lookup(value)
                except KeyError:
                    pass
                else:
                    raise ValueError('node with key "%s" exists'%value)

            # this will raise the KeyError if the property isn't valid
            # ... we don't use getprops() here because we only care about
            # the writeable properties.
            try:
                prop = self.properties[propname]
            except KeyError:
                raise KeyError('"%s" has no property named "%s"'%(
                    self.classname, propname))

            # if the value's the same as the existing value, no sense in
            # doing anything
            current = node.get(propname, None)
            if value == current:
                del propvalues[propname]
                continue
            journalvalues[propname] = current

            # do stuff based on the prop type
            if isinstance(prop, Link):
                link_class = prop.classname
                # if it isn't a number, it's a key
                if value is not None and not isinstance(value, type('')):
                    raise ValueError('property "%s" link value be a string'%(
                        propname))
                if isinstance(value, type('')) and not num_re.match(value):
                    try:
                        value = self.db.classes[link_class].lookup(value)
                    except (TypeError, KeyError):
                        raise IndexError('new property "%s": %s not a %s'%(
                            propname, value, prop.classname))

                if (value is not None and
                        not self.db.getclass(link_class).hasnode(value)):
                    raise IndexError('%s has no node %s'%(link_class,
                        value))

                if self.do_journal and prop.do_journal:
                    # register the unlink with the old linked node
                    if node[propname] is not None:
                        self.db.addjournal(link_class, node[propname],
                            ''"unlink", (self.classname, nodeid, propname))

                    # register the link with the newly linked node
                    if value is not None:
                        self.db.addjournal(link_class, value, ''"link",
                            (self.classname, nodeid, propname))

            elif isinstance(prop, Multilink):
                if value is None:
                    value = []
                if not hasattr(value, '__iter__'):
                    raise TypeError('new property "%s" not an iterable of'
                        ' ids'%propname)
                link_class = self.properties[propname].classname
                l = []
                for entry in value:
                    # if it isn't a number, it's a key
                    if type(entry) != type(''):
                        raise ValueError('new property "%s" link value '
                            'must be a string'%propname)
                    if not num_re.match(entry):
                        try:
                            entry = self.db.classes[link_class].lookup(entry)
                        except (TypeError, KeyError):
                            raise IndexError('new property "%s": %s not a %s'%(
                                propname, entry,
                                self.properties[propname].classname))
                    l.append(entry)
                value = l
                propvalues[propname] = value

                # figure the journal entry for this property
                add = []
                remove = []

                # handle removals
                if propname in node:
                    l = node[propname]
                else:
                    l = []
                # iterate a copy since we remove from l as we go
                for id in l[:]:
                    if id in value:
                        continue
                    # register the unlink with the old linked node
                    if self.do_journal and self.properties[propname].do_journal:
                        self.db.addjournal(link_class, id, 'unlink',
                            (self.classname, nodeid, propname))
                    l.remove(id)
                    remove.append(id)

                # handle additions
                for id in value:
                    if id in l:
                        continue
                    # We can safely check this condition after
                    # checking that this is an addition to the
                    # multilink since the condition was checked for
                    # existing entries at the point they were added to
                    # the multilink.  Since the hasnode call will
                    # result in a SQL query, it is more efficient to
                    # avoid the check if possible.
                    if not self.db.getclass(link_class).hasnode(id):
                        raise IndexError('%s has no node %s'%(link_class,
                            id))
                    # register the link with the newly linked node
                    if self.do_journal and self.properties[propname].do_journal:
                        self.db.addjournal(link_class, id, 'link',
                            (self.classname, nodeid, propname))
                    l.append(id)
                    add.append(id)

                # figure the journal entry
                l = []
                if add:
                    l.append(('+', add))
                if remove:
                    l.append(('-', remove))
                multilink_changes[propname] = (add, remove)
                if l:
                    journalvalues[propname] = tuple(l)

            elif isinstance(prop, String):
                if value is not None and type(value) != type('') and type(value) != type(u''):
                    raise TypeError('new property "%s" not a string'%propname)
                if prop.indexme:
                    # feed the value to the full-text indexer
                    if value is None: value = ''
                    self.db.indexer.add_text((self.classname, nodeid, propname),
                        value)

            elif isinstance(prop, Password):
                if not isinstance(value, password.Password):
                    raise TypeError('new property "%s" not a Password'%propname)
                propvalues[propname] = value
                journalvalues[propname] = \
                    current and password.JournalPassword(current)

            elif value is not None and isinstance(prop, Date):
                if not isinstance(value, date.Date):
                    raise TypeError('new property "%s" not a Date'% propname)
                propvalues[propname] = value

            elif value is not None and isinstance(prop, Interval):
                if not isinstance(value, date.Interval):
                    raise TypeError('new property "%s" not an '
                        'Interval'%propname)
                propvalues[propname] = value

            elif value is not None and isinstance(prop, Number):
                try:
                    float(value)
                except ValueError:
                    raise TypeError('new property "%s" not numeric'%propname)

            elif value is not None and isinstance(prop, Boolean):
                try:
                    int(value)
                except ValueError:
                    raise TypeError('new property "%s" not boolean'%propname)

        # nothing to do?
        if not propvalues:
            return propvalues

        # update the activity time
        propvalues['activity'] = date.Date()
        propvalues['actor'] = self.db.getuid()

        # do the set
        self.db.setnode(self.classname, nodeid, propvalues, multilink_changes)

        # remove the activity props now they're handled
        del propvalues['activity']
        del propvalues['actor']

        # journal the set
        if self.do_journal:
            self.db.addjournal(self.classname, nodeid, ''"set", journalvalues)

        return propvalues
1938 def retire(self, nodeid):
1939 """Retire a node.
1941 The properties on the node remain available from the get() method,
1942 and the node's id is never reused.
1944 Retired nodes are not returned by the find(), list(), or lookup()
1945 methods, and other nodes may reuse the values of their key properties.
1946 """
1947 if self.db.journaltag is None:
1948 raise DatabaseError(_('Database open read-only'))
1950 self.fireAuditors('retire', nodeid, None)
1952 # use the arg for __retired__ to cope with any odd database type
1953 # conversion (hello, sqlite)
1954 sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
1955 self.db.arg, self.db.arg)
1956 self.db.sql(sql, (nodeid, nodeid))
1957 if self.do_journal:
1958 self.db.addjournal(self.classname, nodeid, ''"retired", None)
1960 self.fireReactors('retire', nodeid, None)
1962 def restore(self, nodeid):
1963 """Restore a retired node.
1965 Make node available for all operations like it was before retirement.
1966 """
1967 if self.db.journaltag is None:
1968 raise DatabaseError(_('Database open read-only'))
1970 node = self.db.getnode(self.classname, nodeid)
1971 # check if key property was overrided
1972 key = self.getkey()
1973 try:
1974 id = self.lookup(node[key])
1975 except KeyError:
1976 pass
1977 else:
1978 raise KeyError("Key property (%s) of retired node clashes "
1979 "with existing one (%s)" % (key, node[key]))
1981 self.fireAuditors('restore', nodeid, None)
1982 # use the arg for __retired__ to cope with any odd database type
1983 # conversion (hello, sqlite)
1984 sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
1985 self.db.arg, self.db.arg)
1986 self.db.sql(sql, (0, nodeid))
1987 if self.do_journal:
1988 self.db.addjournal(self.classname, nodeid, ''"restored", None)
1990 self.fireReactors('restore', nodeid, None)
1992 def is_retired(self, nodeid):
1993 """Return true if the node is rerired
1994 """
1995 sql = 'select __retired__ from _%s where id=%s'%(self.classname,
1996 self.db.arg)
1997 self.db.sql(sql, (nodeid,))
1998 return int(self.db.sql_fetchone()[0]) > 0
2000 def destroy(self, nodeid):
2001 """Destroy a node.
2003 WARNING: this method should never be used except in extremely rare
2004 situations where there could never be links to the node being
2005 deleted
2007 WARNING: use retire() instead
2009 WARNING: the properties of this node will not be available ever again
2011 WARNING: really, use retire() instead
2013 Well, I think that's enough warnings. This method exists mostly to
2014 support the session storage of the cgi interface.
2016 The node is completely removed from the hyperdb, including all journal
2017 entries. It will no longer be available, and will generally break code
2018 if there are any references to the node.
2019 """
2020 if self.db.journaltag is None:
2021 raise DatabaseError(_('Database open read-only'))
2022 self.db.destroynode(self.classname, nodeid)
2024 # Locating nodes:
2025 def hasnode(self, nodeid):
2026 """Determine if the given nodeid actually exists
2027 """
2028 return self.db.hasnode(self.classname, nodeid)
2030 def setkey(self, propname):
2031 """Select a String property of this class to be the key property.
2033 'propname' must be the name of a String property of this class or
2034 None, or a TypeError is raised. The values of the key property on
2035 all existing nodes must be unique or a ValueError is raised.
2036 """
2037 prop = self.getprops()[propname]
2038 if not isinstance(prop, String):
2039 raise TypeError('key properties must be String')
2040 self.key = propname
2042 def getkey(self):
2043 """Return the name of the key property for this class or None."""
2044 return self.key
2046 def lookup(self, keyvalue):
2047 """Locate a particular node by its key property and return its id.
2049 If this class has no key property, a TypeError is raised. If the
2050 'keyvalue' matches one of the values for the key property among
2051 the nodes in this class, the matching node's id is returned;
2052 otherwise a KeyError is raised.
2053 """
2054 if not self.key:
2055 raise TypeError('No key property set for class %s'%self.classname)
2057 # use the arg to handle any odd database type conversion (hello,
2058 # sqlite)
2059 sql = "select id from _%s where _%s=%s and __retired__=%s"%(
2060 self.classname, self.key, self.db.arg, self.db.arg)
2061 self.db.sql(sql, (str(keyvalue), 0))
2063 # see if there was a result that's not retired
2064 row = self.db.sql_fetchone()
2065 if not row:
2066 raise KeyError('No key (%s) value "%s" for "%s"'%(self.key,
2067 keyvalue, self.classname))
2069 # return the id
2070 # XXX numeric ids
2071 return str(row[0])
    def find(self, **propspec):
        """Get the ids of nodes in this class which link to the given nodes.

        'propspec' consists of keyword args propname=nodeid or
        propname={nodeid:1, }
        'propname' must be the name of a property in this class, or a
        KeyError is raised.  That property must be a Link or
        Multilink property, or a TypeError is raised.

        Any node in this class whose 'propname' property links to any of
        the nodeids will be returned. Examples::

            db.issue.find(messages='1')
            db.issue.find(messages={'1':1,'3':1}, files={'7':1})
        """
        # shortcut
        if not propspec:
            return []

        # validate the args
        props = self.getprops()
        for propname, nodeids in propspec.iteritems():
            # check the prop is OK
            prop = props[propname]
            if not isinstance(prop, Link) and not isinstance(prop, Multilink):
                raise TypeError("'%s' not a Link/Multilink property"%propname)

        # first, links: build one select over all Link properties
        a = self.db.arg
        allvalues = ()
        sql = []
        where = []
        for prop, values in propspec.iteritems():
            if not isinstance(props[prop], hyperdb.Link):
                continue
            # a single-entry {nodeid: 1} dict collapses to its sole key
            if type(values) is type({}) and len(values) == 1:
                values = list(values)[0]
            if type(values) is type(''):
                allvalues += (values,)
                where.append('_%s = %s'%(prop, a))
            elif values is None:
                where.append('_%s is NULL'%prop)
            else:
                values = list(values)
                s = ''
                # None among the wanted ids means "or no link at all"
                if None in values:
                    values.remove(None)
                    s = '_%s is NULL or '%prop
                allvalues += tuple(values)
                s += '_%s in (%s)'%(prop, ','.join([a]*len(values)))
                where.append('(' + s +')')
        if where:
            # the prepended 0 binds the __retired__=%s placeholder below
            allvalues = (0, ) + allvalues
            sql.append("""select id from _%s where __retired__=%s
                and %s"""%(self.classname, a, ' and '.join(where)))

        # now multilinks: one select per Multilink property, joined with
        # its intermediate table; args follow statement order in the union
        for prop, values in propspec.iteritems():
            if not isinstance(props[prop], hyperdb.Multilink):
                continue
            if not values:
                continue
            # the 0 binds __retired__=%s in this statement
            allvalues += (0, )
            if type(values) is type(''):
                allvalues += (values,)
                s = a
            else:
                allvalues += tuple(values)
                s = ','.join([a]*len(values))
            tn = '%s_%s'%(self.classname, prop)
            sql.append("""select id from _%s, %s where __retired__=%s
                and id = %s.nodeid and %s.linkid in (%s)"""%(self.classname,
                tn, a, tn, tn, s))

        if not sql:
            return []
        sql = ' union '.join(sql)
        self.db.sql(sql, allvalues)
        # XXX numeric ids
        l = [str(x[0]) for x in self.db.sql_fetchall()]
        return l
2155 def stringFind(self, **requirements):
2156 """Locate a particular node by matching a set of its String
2157 properties in a caseless search.
2159 If the property is not a String property, a TypeError is raised.
2161 The return is a list of the id of all nodes that match.
2162 """
2163 where = []
2164 args = []
2165 for propname in requirements:
2166 prop = self.properties[propname]
2167 if not isinstance(prop, String):
2168 raise TypeError("'%s' not a String property"%propname)
2169 where.append(propname)
2170 args.append(requirements[propname].lower())
2172 # generate the where clause
2173 s = ' and '.join(['lower(_%s)=%s'%(col, self.db.arg) for col in where])
2174 sql = 'select id from _%s where %s and __retired__=%s'%(
2175 self.classname, s, self.db.arg)
2176 args.append(0)
2177 self.db.sql(sql, tuple(args))
2178 # XXX numeric ids
2179 l = [str(x[0]) for x in self.db.sql_fetchall()]
2180 return l
2182 def list(self):
2183 """ Return a list of the ids of the active nodes in this class.
2184 """
2185 return self.getnodeids(retired=0)
2187 def getnodeids(self, retired=None):
2188 """ Retrieve all the ids of the nodes for a particular Class.
2190 Set retired=None to get all nodes. Otherwise it'll get all the
2191 retired or non-retired nodes, depending on the flag.
2192 """
2193 # flip the sense of the 'retired' flag if we don't want all of them
2194 if retired is not None:
2195 args = (0, )
2196 if retired:
2197 compare = '>'
2198 else:
2199 compare = '='
2200 sql = 'select id from _%s where __retired__%s%s'%(self.classname,
2201 compare, self.db.arg)
2202 else:
2203 args = ()
2204 sql = 'select id from _%s'%self.classname
2205 self.db.sql(sql, args)
2206 # XXX numeric ids
2207 ids = [str(x[0]) for x in self.db.cursor.fetchall()]
2208 return ids
2210 def _subselect(self, classname, multilink_table):
2211 """Create a subselect. This is factored out because some
2212 databases (hmm only one, so far) doesn't support subselects
2213 look for "I can't believe it's not a toy RDBMS" in the mysql
2214 backend.
2215 """
2216 return '_%s.id not in (select nodeid from %s)'%(classname,
2217 multilink_table)
    # Some DBs order NULL values last.  Set this variable in the backend
    # to a format string that, prepended as an extra "order by" term for
    # each attribute, forces the correct sort position for NULLs.
    # Examples:
    #   order_by_null_values = '(%s is not NULL)'
    #   order_by_null_values = 'notnull(%s)'
    # The format parameter is replaced with the attribute.  None means the
    # backend needs no such correction.
    order_by_null_values = None
2227 def supports_subselects(self):
2228 '''Assuming DBs can do subselects, overwrite if they cannot.
2229 '''
2230 return True
    def _filter_multilink_expression_fallback(
        self, classname, multilink_table, expr):
        '''This is a fallback for database that do not support
           subselects.

           It joins the class table with its multilink table, streams the
           rows grouped by class id, and evaluates the compiled expression
           'expr' in Python for each id's collected link ids.  Returns a
           WHERE fragment restricting _<classname>.id to the matching ids.
        '''

        is_valid = expr.evaluate

        last_id, kws = None, []

        ids = IdListOptimizer()
        append = ids.append

        # This join and the evaluation in program space
        # can be expensive for larger databases!
        # TODO: Find a faster way to collect the data needed
        # to evalute the expression.
        # Moving the expression evaluation into the database
        # would be nice but this tricky: Think about the cases
        # where the multilink table does not have join values
        # needed in evaluation.

        # LEFT OUTER JOIN so items with no links at all still appear
        # (with a NULL linkid); ORDER BY groups each item's links together
        stmnt = "SELECT c.id, m.linkid FROM _%s c " \
                "LEFT OUTER JOIN %s m " \
                "ON c.id = m.nodeid ORDER BY c.id" % (
                    classname, multilink_table)
        self.db.sql(stmnt)

        # collect all multilink items for a class item
        for nid, kw in self.db.sql_fetchiter():
            if nid != last_id:
                if last_id is None:
                    last_id = nid
                else:
                    # we have all multilink items -> evaluate!
                    if is_valid(kws): append(last_id)
                    last_id, kws = nid, []
            # NULL linkid comes from the outer join: item has no links
            if kw is not None:
                kws.append(kw)

        # flush the final group, which the loop above never evaluates
        if last_id is not None and is_valid(kws):
            append(last_id)

        # we have ids of the classname table
        return ids.where("_%s.id" % classname, self.db.arg)
    def _filter_multilink_expression(self, classname, multilink_table, v):
        """ Filters out elements of the classname table that do not
            match the given expression.
            Returns tuple of 'WHERE' introns for the overall filter.

            On success the result is (intron, values); when 'v' is a plain
            list of ids (or anything else goes wrong) the result is the
            3-tuple (where, v, True) signalling "original behaviour" to the
            caller, which then also generates the id join.
        """
        try:
            opcodes = [int(x) for x in v]
            # expression opcodes are < -1; a list with nothing below -1 is
            # a plain id list, so deliberately bail out to the except arm
            if min(opcodes) >= -1: raise ValueError()

            expr = compile_expression(opcodes)

            if not self.supports_subselects():
                # We heavily rely on subselects. If there is
                # no decent support fall back to slower variant.
                return self._filter_multilink_expression_fallback(
                    classname, multilink_table, expr)

            # one membership test per expression leaf, evaluated against
            # the aliased class row "a"
            atom = \
                "%s IN(SELECT linkid FROM %s WHERE nodeid=a.id)" % (
                self.db.arg,
                multilink_table)

            intron = \
                "_%(classname)s.id in (SELECT id " \
                "FROM _%(classname)s AS a WHERE %(condition)s) " % {
                    'classname' : classname,
                    'condition' : expr.generate(lambda n: atom) }

            # collect the leaf values in generation order; they bind the
            # %s placeholders produced above
            values = []
            def collect_values(n): values.append(n.x)
            expr.visit(collect_values)

            return intron, values
        # NOTE(review): bare except also swallows unexpected errors from
        # compile_expression/generate, silently degrading to the simple
        # "linkid in (...)" filter — confirm this is intended
        except:
            # original behavior
            where = "%s.linkid in (%s)" % (
                multilink_table, ','.join([self.db.arg] * len(v)))
            return where, v, True # True to indicate original
2316 def _filter_sql (self, search_matches, filterspec, srt=[], grp=[], retr=0):
2317 """ Compute the proptree and the SQL/ARGS for a filter.
2318 For argument description see filter below.
2319 We return a 3-tuple, the proptree, the sql and the sql-args
2320 or None if no SQL is necessary.
2321 The flag retr serves to retrieve *all* non-Multilink properties
2322 (for filling the cache during a filter_iter)
2323 """
2324 # we can't match anything if search_matches is empty
2325 if not search_matches and search_matches is not None:
2326 return None
2328 icn = self.classname
2330 # vars to hold the components of the SQL statement
2331 frum = [] # FROM clauses
2332 loj = [] # LEFT OUTER JOIN clauses
2333 where = [] # WHERE clauses
2334 args = [] # *any* positional arguments
2335 a = self.db.arg
2337 # figure the WHERE clause from the filterspec
2338 mlfilt = 0 # are we joining with Multilink tables?
2339 sortattr = self._sortattr (group = grp, sort = srt)
2340 proptree = self._proptree(filterspec, sortattr, retr)
2341 mlseen = 0
2342 for pt in reversed(proptree.sortattr):
2343 p = pt
2344 while p.parent:
2345 if isinstance (p.propclass, Multilink):
2346 mlseen = True
2347 if mlseen:
2348 p.sort_ids_needed = True
2349 p.tree_sort_done = False
2350 p = p.parent
2351 if not mlseen:
2352 pt.attr_sort_done = pt.tree_sort_done = True
2353 proptree.compute_sort_done()
2355 cols = ['_%s.id'%icn]
2356 mlsort = []
2357 rhsnum = 0
2358 for p in proptree:
2359 rc = ac = oc = None
2360 cn = p.classname
2361 ln = p.uniqname
2362 pln = p.parent.uniqname
2363 pcn = p.parent.classname
2364 k = p.name
2365 v = p.val
2366 propclass = p.propclass
2367 if p.parent == proptree and p.name == 'id' \
2368 and 'retrieve' in p.need_for:
2369 p.sql_idx = 0
2370 if 'sort' in p.need_for or 'retrieve' in p.need_for:
2371 rc = oc = ac = '_%s._%s'%(pln, k)
2372 if isinstance(propclass, Multilink):
2373 if 'search' in p.need_for:
2374 mlfilt = 1
2375 tn = '%s_%s'%(pcn, k)
2376 if v in ('-1', ['-1'], []):
2377 # only match rows that have count(linkid)=0 in the
2378 # corresponding multilink table)
2379 where.append(self._subselect(pcn, tn))
2380 else:
2381 frum.append(tn)
2382 gen_join = True
2384 if p.has_values and isinstance(v, type([])):
2385 result = self._filter_multilink_expression(pln, tn, v)
2386 # XXX: We dont need an id join if we used the filter
2387 gen_join = len(result) == 3
2389 if gen_join:
2390 where.append('_%s.id=%s.nodeid'%(pln,tn))
2392 if p.children:
2393 frum.append('_%s as _%s' % (cn, ln))
2394 where.append('%s.linkid=_%s.id'%(tn, ln))
2396 if p.has_values:
2397 if isinstance(v, type([])):
2398 where.append(result[0])
2399 args += result[1]
2400 else:
2401 where.append('%s.linkid=%s'%(tn, a))
2402 args.append(v)
2403 if 'sort' in p.need_for:
2404 assert not p.attr_sort_done and not p.sort_ids_needed
2405 elif k == 'id':
2406 if 'search' in p.need_for:
2407 if isinstance(v, type([])):
2408 # If there are no permitted values, then the
2409 # where clause will always be false, and we
2410 # can optimize the query away.
2411 if not v:
2412 return []
2413 s = ','.join([a for x in v])
2414 where.append('_%s.%s in (%s)'%(pln, k, s))
2415 args = args + v
2416 else:
2417 where.append('_%s.%s=%s'%(pln, k, a))
2418 args.append(v)
2419 if 'sort' in p.need_for or 'retrieve' in p.need_for:
2420 rc = oc = ac = '_%s.id'%pln
2421 elif isinstance(propclass, String):
2422 if 'search' in p.need_for:
2423 if not isinstance(v, type([])):
2424 v = [v]
2426 # Quote the bits in the string that need it and then embed
2427 # in a "substring" search. Note - need to quote the '%' so
2428 # they make it through the python layer happily
2429 v = ['%%'+self.db.sql_stringquote(s)+'%%' for s in v]
2431 # now add to the where clause
2432 where.append('('
2433 +' and '.join(["_%s._%s LIKE '%s'"%(pln, k, s) for s in v])
2434 +')')
2435 # note: args are embedded in the query string now
2436 if 'sort' in p.need_for:
2437 oc = ac = 'lower(_%s._%s)'%(pln, k)
2438 elif isinstance(propclass, Link):
2439 if 'search' in p.need_for:
2440 if p.children:
2441 if 'sort' not in p.need_for:
2442 frum.append('_%s as _%s' % (cn, ln))
2443 where.append('_%s._%s=_%s.id'%(pln, k, ln))
2444 if p.has_values:
2445 if isinstance(v, type([])):
2446 d = {}
2447 for entry in v:
2448 if entry == '-1':
2449 entry = None
2450 d[entry] = entry
2451 l = []
2452 if None in d or not d:
2453 if None in d: del d[None]
2454 l.append('_%s._%s is NULL'%(pln, k))
2455 if d:
2456 v = list(d)
2457 s = ','.join([a for x in v])
2458 l.append('(_%s._%s in (%s))'%(pln, k, s))
2459 args = args + v
2460 if l:
2461 where.append('(' + ' or '.join(l) +')')
2462 else:
2463 if v in ('-1', None):
2464 v = None
2465 where.append('_%s._%s is NULL'%(pln, k))
2466 else:
2467 where.append('_%s._%s=%s'%(pln, k, a))
2468 args.append(v)
2469 if 'sort' in p.need_for:
2470 lp = p.cls.labelprop()
2471 oc = ac = '_%s._%s'%(pln, k)
2472 if lp != 'id':
2473 if p.tree_sort_done:
2474 loj.append(
2475 'LEFT OUTER JOIN _%s as _%s on _%s._%s=_%s.id'%(
2476 cn, ln, pln, k, ln))
2477 oc = '_%s._%s'%(ln, lp)
2478 if 'retrieve' in p.need_for:
2479 rc = '_%s._%s'%(pln, k)
2480 elif isinstance(propclass, Date) and 'search' in p.need_for:
2481 dc = self.db.to_sql_value(hyperdb.Date)
2482 if isinstance(v, type([])):
2483 s = ','.join([a for x in v])
2484 where.append('_%s._%s in (%s)'%(pln, k, s))
2485 args = args + [dc(date.Date(x)) for x in v]
2486 else:
2487 try:
2488 # Try to filter on range of dates
2489 date_rng = propclass.range_from_raw(v, self.db)
2490 if date_rng.from_value:
2491 where.append('_%s._%s >= %s'%(pln, k, a))
2492 args.append(dc(date_rng.from_value))
2493 if date_rng.to_value:
2494 where.append('_%s._%s <= %s'%(pln, k, a))
2495 args.append(dc(date_rng.to_value))
2496 except ValueError:
2497 # If range creation fails - ignore that search parameter
2498 pass
2499 elif isinstance(propclass, Interval):
2500 # filter/sort using the __<prop>_int__ column
2501 if 'search' in p.need_for:
2502 if isinstance(v, type([])):
2503 s = ','.join([a for x in v])
2504 where.append('_%s.__%s_int__ in (%s)'%(pln, k, s))
2505 args = args + [date.Interval(x).as_seconds() for x in v]
2506 else:
2507 try:
2508 # Try to filter on range of intervals
2509 date_rng = Range(v, date.Interval)
2510 if date_rng.from_value:
2511 where.append('_%s.__%s_int__ >= %s'%(pln, k, a))
2512 args.append(date_rng.from_value.as_seconds())
2513 if date_rng.to_value:
2514 where.append('_%s.__%s_int__ <= %s'%(pln, k, a))
2515 args.append(date_rng.to_value.as_seconds())
2516 except ValueError:
2517 # If range creation fails - ignore search parameter
2518 pass
2519 if 'sort' in p.need_for:
2520 oc = ac = '_%s.__%s_int__'%(pln,k)
2521 if 'retrieve' in p.need_for:
2522 rc = '_%s._%s'%(pln,k)
2523 elif isinstance(propclass, Boolean) and 'search' in p.need_for:
2524 if type(v) == type(""):
2525 v = v.split(',')
2526 if type(v) != type([]):
2527 v = [v]
2528 bv = []
2529 for val in v:
2530 if type(val) is type(''):
2531 bv.append(propclass.from_raw (val))
2532 else:
2533 bv.append(bool(val))
2534 if len(bv) == 1:
2535 where.append('_%s._%s=%s'%(pln, k, a))
2536 args = args + bv
2537 else:
2538 s = ','.join([a for x in v])
2539 where.append('_%s._%s in (%s)'%(pln, k, s))
2540 args = args + bv
2541 elif 'search' in p.need_for:
2542 if isinstance(v, type([])):
2543 s = ','.join([a for x in v])
2544 where.append('_%s._%s in (%s)'%(pln, k, s))
2545 args = args + v
2546 else:
2547 where.append('_%s._%s=%s'%(pln, k, a))
2548 args.append(v)
2549 if oc:
2550 if p.sort_ids_needed:
2551 if rc == ac:
2552 p.sql_idx = len(cols)
2553 p.auxcol = len(cols)
2554 cols.append(ac)
2555 if p.tree_sort_done and p.sort_direction:
2556 # Don't select top-level id or multilink twice
2557 if (not p.sort_ids_needed or ac != oc) and (p.name != 'id'
2558 or p.parent != proptree):
2559 if rc == oc:
2560 p.sql_idx = len(cols)
2561 cols.append(oc)
2562 desc = ['', ' desc'][p.sort_direction == '-']
2563 # Some SQL dbs sort NULL values last -- we want them first.
2564 if (self.order_by_null_values and p.name != 'id'):
2565 nv = self.order_by_null_values % oc
2566 cols.append(nv)
2567 p.orderby.append(nv + desc)
2568 p.orderby.append(oc + desc)
2569 if 'retrieve' in p.need_for and p.sql_idx is None:
2570 assert(rc)
2571 p.sql_idx = len(cols)
2572 cols.append (rc)
2574 props = self.getprops()
2576 # don't match retired nodes
2577 where.append('_%s.__retired__=0'%icn)
2579 # add results of full text search
2580 if search_matches is not None:
2581 s = ','.join([a for x in search_matches])
2582 where.append('_%s.id in (%s)'%(icn, s))
2583 args = args + [x for x in search_matches]
2585 # construct the SQL
2586 frum.append('_'+icn)
2587 frum = ','.join(frum)
2588 if where:
2589 where = ' where ' + (' and '.join(where))
2590 else:
2591 where = ''
2592 if mlfilt:
2593 # we're joining tables on the id, so we will get dupes if we
2594 # don't distinct()
2595 cols[0] = 'distinct(_%s.id)'%icn
2597 order = []
2598 # keep correct sequence of order attributes.
2599 for sa in proptree.sortattr:
2600 if not sa.attr_sort_done:
2601 continue
2602 order.extend(sa.orderby)
2603 if order:
2604 order = ' order by %s'%(','.join(order))
2605 else:
2606 order = ''
2608 cols = ','.join(cols)
2609 loj = ' '.join(loj)
2610 sql = 'select %s from %s %s %s%s'%(cols, frum, loj, where, order)
2611 args = tuple(args)
2612 __traceback_info__ = (sql, args)
2613 return proptree, sql, args
2615 def filter(self, search_matches, filterspec, sort=[], group=[]):
2616 """Return a list of the ids of the active nodes in this class that
2617 match the 'filter' spec, sorted by the group spec and then the
2618 sort spec
2620 "filterspec" is {propname: value(s)}
2622 "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
2623 or None and prop is a prop name or None. Note that for
2624 backward-compatibility reasons a single (dir, prop) tuple is
2625 also allowed.
2627 "search_matches" is a container type or None
2629 The filter must match all properties specificed. If the property
2630 value to match is a list:
2632 1. String properties must match all elements in the list, and
2633 2. Other properties must match any of the elements in the list.
2634 """
2635 if __debug__:
2636 start_t = time.time()
2638 sq = self._filter_sql (search_matches, filterspec, sort, group)
2639 # nothing to match?
2640 if sq is None:
2641 return []
2642 proptree, sql, args = sq
2644 self.db.sql(sql, args)
2645 l = self.db.sql_fetchall()
2647 # Compute values needed for sorting in proptree.sort
2648 for p in proptree:
2649 if hasattr(p, 'auxcol'):
2650 p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
2651 # return the IDs (the first column)
2652 # XXX numeric ids
2653 l = [str(row[0]) for row in l]
2654 l = proptree.sort (l)
2656 if __debug__:
2657 self.db.stats['filtering'] += (time.time() - start_t)
2658 return l
    def filter_iter(self, search_matches, filterspec, sort=[], group=[]):
        """Iterator similar to filter above with same args.
        Limitation: We don't sort on multilinks.
        This uses an optimisation: We put all nodes that are in the
        current row into the node cache. Then we return the node id.
        That way a fetch of a node won't create another sql-fetch (with
        a join) from the database because the nodes are already in the
        cache. We're using our own temporary cursor.
        """
        # retr=1 asks _filter_sql to also select every non-Multilink
        # property so rows can fill the cache
        sq = self._filter_sql(search_matches, filterspec, sort, group, retr=1)
        # nothing to match?
        if sq is None:
            return
        proptree, sql, args = sq
        # private cursor so concurrent fetches don't disturb the shared one
        cursor = self.db.conn.cursor()
        self.db.sql(sql, args, cursor)
        # map (classname, proptree-node id) -> {propname: proptree node}
        # for every property we must materialise per row
        classes = {}
        for p in proptree:
            if 'retrieve' in p.need_for:
                cn = p.parent.classname
                ptid = p.parent.id # not the nodeid!
                key = (cn, ptid)
                if key not in classes:
                    classes[key] = {}
                name = p.name
                assert (name)
                classes[key][name] = p
                # converter from SQL value to hyperdb value for this prop
                p.to_hyperdb = self.db.to_hyperdb_value(p.propclass.__class__)
        while True:
            row = cursor.fetchone()
            if not row: break
            # populate cache with current items
            for (classname, ptid), pt in classes.iteritems():
                nodeid = str(row[pt['id'].sql_idx])
                key = (classname, nodeid)
                if key in self.db.cache:
                    # already cached: just bump its freshness
                    self.db._cache_refresh(key)
                    continue
                node = {}
                for propname, p in pt.iteritems():
                    value = row[p.sql_idx]
                    if value is not None:
                        value = p.to_hyperdb(value)
                    node[propname] = value
                self.db._cache_save(key, node)
            # first column is always the filtered class's id
            yield str(row[0])
2707 def filter_sql(self, sql):
2708 """Return a list of the ids of the items in this class that match
2709 the SQL provided. The SQL is a complete "select" statement.
2711 The SQL select must include the item id as the first column.
2713 This function DOES NOT filter out retired items, add on a where
2714 clause "__retired__=0" if you don't want retired nodes.
2715 """
2716 if __debug__:
2717 start_t = time.time()
2719 self.db.sql(sql)
2720 l = self.db.sql_fetchall()
2722 if __debug__:
2723 self.db.stats['filtering'] += (time.time() - start_t)
2724 return l
2726 def count(self):
2727 """Get the number of nodes in this class.
2729 If the returned integer is 'numnodes', the ids of all the nodes
2730 in this class run from 1 to numnodes, and numnodes+1 will be the
2731 id of the next node to be created in this class.
2732 """
2733 return self.db.countnodes(self.classname)
2735 # Manipulating properties:
2736 def getprops(self, protected=1):
2737 """Return a dictionary mapping property names to property objects.
2738 If the "protected" flag is true, we include protected properties -
2739 those which may not be modified.
2740 """
2741 d = self.properties.copy()
2742 if protected:
2743 d['id'] = String()
2744 d['creation'] = hyperdb.Date()
2745 d['activity'] = hyperdb.Date()
2746 d['creator'] = hyperdb.Link('user')
2747 d['actor'] = hyperdb.Link('user')
2748 return d
2750 def addprop(self, **properties):
2751 """Add properties to this class.
2753 The keyword arguments in 'properties' must map names to property
2754 objects, or a TypeError is raised. None of the keys in 'properties'
2755 may collide with the names of existing properties, or a ValueError
2756 is raised before any properties have been added.
2757 """
2758 for key in properties:
2759 if key in self.properties:
2760 raise ValueError(key)
2761 self.properties.update(properties)
    def index(self, nodeid):
        """Add (or refresh) the node to search indexes
        """
        # find all the String properties that have indexme
        for prop, propclass in self.getprops().iteritems():
            if isinstance(propclass, String) and propclass.indexme:
                # index under the (classname, nodeid, property) key
                self.db.indexer.add_text((self.classname, nodeid, prop),
                    str(self.get(nodeid, prop)))
2772 #
2773 # import / export support
2774 #
2775 def export_list(self, propnames, nodeid):
2776 """ Export a node - generate a list of CSV-able data in the order
2777 specified by propnames for the given node.
2778 """
2779 properties = self.getprops()
2780 l = []
2781 for prop in propnames:
2782 proptype = properties[prop]
2783 value = self.get(nodeid, prop)
2784 # "marshal" data where needed
2785 if value is None:
2786 pass
2787 elif isinstance(proptype, hyperdb.Date):
2788 value = value.get_tuple()
2789 elif isinstance(proptype, hyperdb.Interval):
2790 value = value.get_tuple()
2791 elif isinstance(proptype, hyperdb.Password):
2792 value = str(value)
2793 l.append(repr(value))
2794 l.append(repr(self.is_retired(nodeid)))
2795 return l
    def import_list(self, propnames, proplist):
        """ Import a node - all information including "id" is present and
            should not be sanity checked. Triggers are not triggered. The
            journal should be initialised using the "creator" and "created"
            information.

            Return the nodeid of the node imported.

            NOTE: values are reconstructed with eval() from the repr()'ed
            CSV export, so this must only ever be fed trusted export files.
        """
        if self.db.journaltag is None:
            raise DatabaseError(_('Database open read-only'))
        properties = self.getprops()

        # make the new node's property map
        d = {}
        retire = 0
        if not "id" in propnames:
            newid = self.db.newid(self.classname)
        else:
            newid = eval(proplist[propnames.index("id")])
        for i in range(len(propnames)):
            # Use eval to reverse the repr() used to output the CSV
            value = eval(proplist[i])

            # Figure the property for this column
            propname = propnames[i]

            # "unmarshal" where necessary
            if propname == 'id':
                continue
            elif propname == 'is retired':
                # is the item retired?
                if int(value):
                    retire = 1
                continue
            elif value is None:
                d[propname] = None
                continue

            prop = properties[propname]
            if value is None:
                # don't set Nones
                continue
            elif isinstance(prop, hyperdb.Date):
                value = date.Date(value)
            elif isinstance(prop, hyperdb.Interval):
                value = date.Interval(value)
            elif isinstance(prop, hyperdb.Password):
                value = password.Password(encrypted=value)
            elif isinstance(prop, String):
                if isinstance(value, unicode):
                    value = value.encode('utf8')
                if not isinstance(value, str):
                    raise TypeError('new property "%(propname)s" not a '
                        'string: %(value)r'%locals())
                if prop.indexme:
                    # keep the full-text index in step with the import
                    self.db.indexer.add_text((self.classname, newid, propname),
                        value)
            d[propname] = value

        # get a new id if necessary
        if newid is None:
            newid = self.db.newid(self.classname)

        # insert new node or update existing?
        if not self.hasnode(newid):
            self.db.addnode(self.classname, newid, d) # insert
        else:
            self.db.setnode(self.classname, newid, d) # update

        # retire?
        if retire:
            # use the arg for __retired__ to cope with any odd database type
            # conversion (hello, sqlite)
            # (__retired__ stores the node's own id, keeping retired key
            # values from colliding with active ones)
            sql = 'update _%s set __retired__=%s where id=%s'%(self.classname,
                self.db.arg, self.db.arg)
            self.db.sql(sql, (newid, newid))
        return newid
    def export_journals(self):
        """Export a class's journal - generate a list of lists of
        CSV-able data:

            nodeid, date, user, action, params

        No heading here - the columns are fixed.
        """
        properties = self.getprops()
        r = []
        for nodeid in self.getnodeids():
            # NOTE: the loop variable 'date' shadows the date module for the
            # body of this loop; the module is not referenced again here
            for nodeid, date, user, action, params in self.history(nodeid):
                date = date.get_tuple()
                if action == 'set':
                    export_data = {}
                    for propname, value in params.iteritems():
                        if propname not in properties:
                            # property no longer in the schema
                            continue

                        prop = properties[propname]
                        # make sure the params are eval()'able
                        if value is None:
                            pass
                        elif isinstance(prop, Date):
                            value = value.get_tuple()
                        elif isinstance(prop, Interval):
                            value = value.get_tuple()
                        elif isinstance(prop, Password):
                            value = str(value)
                        export_data[propname] = value
                    params = export_data
                elif action == 'create' and params:
                    # old tracker with data stored in the create!
                    params = {}
                l = [nodeid, date, user, action, params]
                r.append(list(map(repr, l)))
        return r
class FileClass(hyperdb.FileClass, Class):
    """This class defines a large chunk of data. To support this, it has a
    mandatory String property "content" which is typically saved off
    externally to the hyperdb.

    The default MIME type of this data is defined by the
    "default_mime_type" class attribute, which may be overridden by each
    node if the class defines a "type" String property.
    """
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "content"
        and "type" properties.
        """
        if 'content' not in properties:
            properties['content'] = hyperdb.String(indexme='yes')
        if 'type' not in properties:
            properties['type'] = hyperdb.String()
        Class.__init__(self, db, classname, **properties)

    def create(self, **propvalues):
        """ snaffle the file propvalue and store in a file
        """
        # we need to fire the auditors now, or the content property won't
        # be in propvalues for the auditors to play with
        self.fireAuditors('create', None, propvalues)

        # now remove the content property so it's not stored in the db
        content = propvalues['content']
        del propvalues['content']

        # do the database create
        newid = self.create_inner(**propvalues)

        # figure the mime type
        mime_type = propvalues.get('type', self.default_mime_type)

        # and index!
        if self.properties['content'].indexme:
            self.db.indexer.add_text((self.classname, newid, 'content'),
                content, mime_type)

        # store off the content as a file
        self.db.storefile(self.classname, newid, None, content)

        # fire reactors
        self.fireReactors('create', newid, None)

        return newid

    def get(self, nodeid, propname, default=_marker, cache=1):
        """ Trap the content propname and get it from the file

        'cache' exists for backwards compatibility, and is not used.
        """
        poss_msg = 'Possibly a access right configuration problem.'
        if propname == 'content':
            try:
                return self.db.getfile(self.classname, nodeid, None)
            except IOError, strerror:
                # BUG: by catching this we donot see an error in the log.
                return 'ERROR reading file: %s%s\n%s\n%s'%(
                    self.classname, nodeid, poss_msg, strerror)
        # everything other than "content" is handled by the base class
        if default is not _marker:
            return Class.get(self, nodeid, propname, default)
        else:
            return Class.get(self, nodeid, propname)

    def set(self, itemid, **propvalues):
        """ Snarf the "content" propvalue and update it in a file
        """
        self.fireAuditors('set', itemid, propvalues)
        oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid))

        # now remove the content property so it's not stored in the db
        content = None
        if 'content' in propvalues:
            content = propvalues['content']
            del propvalues['content']

        # do the database create
        propvalues = self.set_inner(itemid, **propvalues)

        # do content?
        # NOTE(review): falsy content (e.g. an empty string) is not written
        # back or re-indexed here — confirm that's intended
        if content:
            # store and possibly index
            self.db.storefile(self.classname, itemid, None, content)
            if self.properties['content'].indexme:
                mime_type = self.get(itemid, 'type', self.default_mime_type)
                self.db.indexer.add_text((self.classname, itemid, 'content'),
                    content, mime_type)
            propvalues['content'] = content

        # fire reactors
        self.fireReactors('set', itemid, oldvalues)
        return propvalues

    def index(self, nodeid):
        """ Add (or refresh) the node to search indexes.

        Use the content-type property for the content property.
        """
        # find all the String properties that have indexme
        for prop, propclass in self.getprops().iteritems():
            if prop == 'content' and propclass.indexme:
                # content is indexed with its node's MIME type
                mime_type = self.get(nodeid, 'type', self.default_mime_type)
                self.db.indexer.add_text((self.classname, nodeid, 'content'),
                    str(self.get(nodeid, 'content')), mime_type)
            elif isinstance(propclass, hyperdb.String) and propclass.indexme:
                # index them under (classname, nodeid, property)
                try:
                    value = str(self.get(nodeid, prop))
                except IndexError:
                    # node has been destroyed
                    continue
                self.db.indexer.add_text((self.classname, nodeid, prop), value)
3030 # XXX deviation from spec - was called ItemClass
class IssueClass(Class, roundupdb.IssueClass):
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties.  If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation", "creator", "activity" or "actor" property, a ValueError
        is raised.
        """
        # Each default is built lazily so nothing is constructed when the
        # caller already supplied that property.
        default_props = (
            ('title', lambda: hyperdb.String(indexme='yes')),
            ('messages', lambda: hyperdb.Multilink("msg")),
            ('files', lambda: hyperdb.Multilink("file")),
            # note: journalling is turned off as it really just wastes
            # space. this behaviour may be overridden in an instance
            ('nosy', lambda: hyperdb.Multilink("user", do_journal="no")),
            ('superseder', lambda: hyperdb.Multilink(classname)),
        )
        for name, factory in default_props:
            if name not in properties:
                properties[name] = factory()
        Class.__init__(self, db, classname, **properties)
3054 # vim: set et sts=4 sw=4 :