1 # $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $
2 '''Implement an in-memory hyperdb for testing purposes.
3 '''
5 import shutil
7 from roundup import hyperdb
8 from roundup import roundupdb
9 from roundup import security
10 from roundup import password
11 from roundup import configuration
12 from roundup.backends import back_anydbm
13 from roundup.backends import indexer_dbm
14 from roundup.backends import sessions_dbm
15 from roundup.backends import indexer_common
16 from roundup.hyperdb import *
17 from roundup.support import ensureParentsExist
def new_config(debug=False):
    """Return a CoreConfig set up for the in-memory test database.

    When 'debug' is true the logging level is raised to DEBUG.
    """
    cfg = configuration.CoreConfig()
    cfg.DATABASE = "db"
    #cfg.logging = MockNull()
    if debug:
        cfg.LOGGING_LEVEL = "DEBUG"
    # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
    cfg.MAIL_DOMAIN = "your.tracker.email.domain.example"
    cfg.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
    return cfg
def create(journaltag, create=True, debug=False):
    """Build a fully-initialised in-memory Database.

    Loads the classic template's schema, initial data and detectors from
    ../share/roundup/templates/classic.  If 'create' is true a sample
    'fred' user is added.

    NOTE(review): this module is Python 2 (execfile).  'os' is not
    imported directly in this file -- presumably it arrives via the
    'from roundup.hyperdb import *' star import; verify.
    """
    db = Database(new_config(debug), journaltag)

    # load standard schema
    schema = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/schema.py')
    # run the schema with this module's globals plus 'db' bound
    vars = dict(globals())
    vars['db'] = db
    execfile(schema, vars)
    initial_data = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/initial_data.py')
    vars = dict(db=db, admin_email='admin@test.com',
        adminpw=password.Password('sekrit'))
    execfile(initial_data, vars)

    # load standard detectors; each detector module must expose init(db)
    dirname = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'): continue
        vars = {}
        execfile(os.path.join(dirname, fn), vars)
        vars['init'](db)

    # NOTE: the following string literal is dead code kept for reference;
    # it appears to be an inline copy of the schema loaded above
    '''
    status = Class(db, "status", name=String())
    status.setkey("name")
    priority = Class(db, "priority", name=String(), order=String())
    priority.setkey("name")
    keyword = Class(db, "keyword", name=String(), order=String())
    keyword.setkey("name")
    user = Class(db, "user", username=String(), password=Password(),
        assignable=Boolean(), age=Number(), roles=String(), address=String(),
        supervisor=Link('user'),realname=String(),alternate_addresses=String())
    user.setkey("username")
    file = FileClass(db, "file", name=String(), type=String(),
        comment=String(indexme="yes"), fooz=Password())
    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
    issue = IssueClass(db, "issue", title=String(indexme="yes"),
        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
        priority=Link('priority'), spam=Multilink('msg'),
        feedback=Link('msg'))
    stuff = Class(db, "stuff", stuff=String())
    session = Class(db, 'session', title=String())
    msg = FileClass(db, "msg", date=Date(),
        author=Link("user", do_journal='no'),
        files=Multilink('file'), inreplyto=String(),
        messageid=String(), summary=String(),
        content=String(),
        recipients=Multilink("user", do_journal='no')
        )
    '''
    if create:
        db.user.create(username="fred", roles='User',
            password=password.Password('sekrit'), address='fred@example.com')

    db.security.addPermissionToRole('User', 'Email Access')
    # NOTE: more dead code kept for reference (string literal)
    '''
    db.security.addPermission(name='Register', klass='user')
    db.security.addPermissionToRole('User', 'Web Access')
    db.security.addPermissionToRole('Anonymous', 'Email Access')
    db.security.addPermissionToRole('Anonymous', 'Register', 'user')
    for cl in 'issue', 'file', 'msg', 'keyword':
        db.security.addPermissionToRole('User', 'View', cl)
        db.security.addPermissionToRole('User', 'Edit', cl)
        db.security.addPermissionToRole('User', 'Create', cl)
    for cl in 'priority', 'status':
        db.security.addPermissionToRole('User', 'View', cl)
    '''
    return db
class cldb(dict):
    """Per-class node store: a plain dict whose close() is a no-op."""

    def close(self):
        # nothing to release for an in-memory store
        pass
class BasicDatabase(dict):
    """Dict-backed replacement for an anydbm store.

    Keys are id strings, values are dicts of marshalled data.  Looking up
    an unknown id transparently creates an empty record for it.
    """
    def __getitem__(self, key):
        # auto-vivify: an unknown id gets (and keeps) an empty record
        try:
            return super(BasicDatabase, self).__getitem__(key)
        except KeyError:
            created = {}
            super(BasicDatabase, self).__setitem__(key, created)
            return created
    def exists(self, infoid):
        # true when a record for 'infoid' is already present
        return infoid in self
    def get(self, infoid, value, default=None):
        # fetch one field of the record (creating the record if absent)
        record = self[infoid]
        return record.get(value, default)
    def getall(self, infoid):
        # unlike __getitem__, a missing id is an error here
        if infoid not in self:
            raise KeyError(infoid)
        return self[infoid]
    def set(self, infoid, **newvalues):
        # merge the keyword values into the record
        self[infoid].update(newvalues)
    def list(self):
        return self.keys()
    def destroy(self, infoid):
        del self[infoid]
    def commit(self):
        # in-memory: nothing to flush
        pass
    def close(self):
        pass
    def updateTimestamp(self, sessid):
        # timestamps are irrelevant for the test store
        pass
    def clean(self):
        pass
class Sessions(BasicDatabase, sessions_dbm.Sessions):
    """In-memory session store; BasicDatabase provides the storage."""
    name = 'sessions'
class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
    """In-memory one-time-key store.

    NOTE(review): inherits sessions_dbm.Sessions rather than a dedicated
    one-time-key base -- possibly intentional since only the
    BasicDatabase behaviour is used here, but worth confirming.
    """
    name = 'otks'
class Indexer(indexer_dbm.Indexer):
    """Full-text index state held purely in memory; never persisted."""

    def __init__(self, db):
        indexer_common.Indexer.__init__(self, db)
        # quiet=9 silences progress output; nothing indexed yet
        self.reindex, self.quiet, self.changed = 0, 9, 0

    def load_index(self, reload=0, wordlist=None):
        # only (re)initialise the in-memory structures when forced to
        if self.index_loaded() and not reload:
            return 0
        self.words = {}
        self.files = {'_TOP': (0, None)}
        self.fileids = {}
        self.changed = 0

    def save_index(self):
        # nothing to persist for the in-memory index
        pass

    def force_reindex(self):
        # TODO I'm concerned that force_reindex may not be tested by
        # testForcedReindexing if the functionality can just be removed
        pass
class Database(back_anydbm.Database):
    """A database for storing records containing flexible data types.

    All state lives in plain dicts on the instance, so nothing survives
    the process -- this backend exists purely for the test suite.

    Transaction stuff TODO:

    - check the timestamp of the class file and nuke the cache if it's
      modified. Do some sort of conflict checking on the dirty stuff.
    - perhaps detect write collisions (related to above)?
    """
    def __init__(self, config, journaltag=None):
        self.config, self.journaltag = config, journaltag
        self.classes = {}       # classname -> Class instance
        self.items = {}         # classname -> cldb of nodes
        self.ids = {}           # classname -> last allocated id (int)
        self.journals = {}      # classname -> nodeid -> journal entries
        self.files = {}         # (classname, nodeid, property) -> content
        self.tx_files = {}      # like .files, but not yet committed
        self.security = security.Security(self)
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}
        self.sessions = Sessions()
        self.otks = OneTimeKeys()
        self.indexer = Indexer(self)

        # anydbm bits
        self.cache = {}         # cache of nodes loaded or created
        self.dirtynodes = {}    # keep track of the dirty nodes by class
        self.newnodes = {}      # keep track of the new nodes by class
        self.destroyednodes = {}# keep track of the destroyed nodes by class
        self.transactions = []

    def filename(self, classname, nodeid, property=None, create=0):
        # the tests only need *some* readable file on disk, so hand back
        # a throwaway copy of this module
        shutil.copyfile(__file__, __file__+'.dummy')
        return __file__+'.dummy'

    def filesize(self, classname, nodeid, property=None, create=0):
        return len(self.getfile(classname, nodeid, property))

    def post_init(self):
        pass

    def refresh_database(self):
        pass

    def getSessionManager(self):
        return self.sessions

    def getOTKManager(self):
        return self.otks

    def reindex(self, classname=None, show_progress=False):
        pass

    def __repr__(self):
        return '<memorydb instance at %x>'%id(self)

    def storefile(self, classname, nodeid, property, content):
        # stage the content; it only reaches .files when the transaction
        # commits via doStoreFile
        self.tx_files[classname, nodeid, property] = content
        self.transactions.append((self.doStoreFile, (classname, nodeid,
            property)))

    def getfile(self, classname, nodeid, property):
        # uncommitted content shadows committed content
        if (classname, nodeid, property) in self.tx_files:
            return self.tx_files[classname, nodeid, property]
        return self.files[classname, nodeid, property]

    def doStoreFile(self, classname, nodeid, property, **databases):
        # commit the staged content
        self.files[classname, nodeid, property] = \
            self.tx_files[classname, nodeid, property]
        return (classname, nodeid)

    def rollbackStoreFile(self, classname, nodeid, property, **databases):
        del self.tx_files[classname, nodeid, property]

    def numfiles(self):
        return len(self.files) + len(self.tx_files)

    def close(self):
        self.clearCache()
        self.tx_files = {}
        # kill the schema too
        self.classes = {}
        # just keep the .items

    #
    # Classes
    #
    def __getattr__(self, classname):
        """A convenient way of calling self.getclass(classname)."""
        if classname in self.classes:
            return self.classes[classname]
        raise AttributeError(classname)

    def addclass(self, cl):
        """Register class 'cl', creating its node store on first sight and
        the default Create/Edit/View permissions."""
        cn = cl.classname
        if cn in self.classes:
            raise ValueError(cn)
        self.classes[cn] = cl
        if cn not in self.items:
            self.items[cn] = cldb()
            self.ids[cn] = 0

        # add default Edit and View permissions
        self.security.addPermission(name="Create", klass=cn,
            description="User is allowed to create "+cn)
        self.security.addPermission(name="Edit", klass=cn,
            description="User is allowed to edit "+cn)
        self.security.addPermission(name="View", klass=cn,
            description="User is allowed to access "+cn)

    def getclasses(self):
        """Return a list of the names of all existing classes."""
        return sorted(self.classes)

    def getclass(self, classname):
        """Get the Class object representing a particular class.

        If 'classname' is not a valid class name, a KeyError is raised.
        """
        try:
            return self.classes[classname]
        except KeyError:
            raise KeyError('There is no class called "%s"'%classname)

    #
    # Class DBs
    #
    def clear(self):
        # drop the nodes only; ids and journals are deliberately retained
        self.items = {}

    def getclassdb(self, classname, mode='r'):
        """ grab a connection to the class db that will be used for
            multiple actions
        """
        return self.items[classname]

    def getCachedJournalDB(self, classname):
        return self.journals.setdefault(classname, {})

    #
    # Node IDs
    #
    def newid(self, classname):
        """Allocate and return the next node id (as a string)."""
        self.ids[classname] += 1
        return str(self.ids[classname])

    def setid(self, classname, id):
        self.ids[classname] = int(id)

    #
    # Journal
    #
    def doSaveJournal(self, classname, nodeid, action, params, creator,
            creation):
        if creator is None:
            creator = self.getuid()
        if creation is None:
            # 'date' presumably comes from the roundup.hyperdb star
            # import -- TODO confirm
            creation = date.Date()
        self.journals.setdefault(classname, {}).setdefault(nodeid,
            []).append((nodeid, creation, creator, action, params))

    def doSetJournal(self, classname, nodeid, journal):
        self.journals.setdefault(classname, {})[nodeid] = journal

    def getjournal(self, classname, nodeid):
        """Return the journal for 'nodeid', including entries from
        transactions not yet committed.  Raises IndexError when the node
        has no journal at all."""
        # our journal result
        res = []

        # add any journal entries for transactions not committed to the
        # database
        for method, args in self.transactions:
            if method != self.doSaveJournal:
                continue
            (cache_classname, cache_nodeid, cache_action, cache_params,
                cache_creator, cache_creation) = args
            if cache_classname == classname and cache_nodeid == nodeid:
                if not cache_creator:
                    cache_creator = self.getuid()
                if not cache_creation:
                    cache_creation = date.Date()
                res.append((cache_nodeid, cache_creation, cache_creator,
                    cache_action, cache_params))
        try:
            res += self.journals.get(classname, {})[nodeid]
        except KeyError:
            if res: return res
            raise IndexError(nodeid)
        return res

    def pack(self, pack_before):
        """ Delete all journal entries except "create" before 'pack_before'.
        """
        pack_before = pack_before.serialise()
        for classname in self.journals:
            db = self.journals[classname]
            for key in db:
                # filter the journal for this db entry
                l = []
                for entry in db[key]:
                    # unpack into locals; the previous code unpacked the
                    # entry's creator into self.journaltag, clobbering the
                    # database's journal tag as a side effect
                    (nodeid, date_stamp, journaltag, action,
                        params) = entry
                    date_stamp = date_stamp.serialise()
                    # if the entry is after the pack date, _or_ the initial
                    # create entry, then it stays
                    if date_stamp > pack_before or action == 'create':
                        l.append(entry)
                db[key] = l
class Class(back_anydbm.Class):
    """The anydbm Class is used as-is; no in-memory specialisation."""
    pass
class FileClass(back_anydbm.FileClass):
    """File class whose content lives in the Database's .files mapping.

    Ensures the standard 'content' and 'type' properties exist, and
    overrides export/import to read and write the in-memory store.
    """
    def __init__(self, db, classname, **properties):
        # supply the standard file properties when the caller omits them
        if 'content' not in properties:
            properties['content'] = hyperdb.String(indexme='yes')
        if 'type' not in properties:
            properties['type'] = hyperdb.String()
        back_anydbm.Class.__init__(self, db, classname, **properties)

    def export_files(self, dirname, nodeid):
        """Write the node's content out to its export filename."""
        dest = self.exportFilename(dirname, nodeid)
        ensureParentsExist(dest)
        f = open(dest, 'wb')
        try:
            # close the handle even if the write fails
            f.write(self.db.files[self.classname, nodeid, None])
        finally:
            f.close()

    def import_files(self, dirname, nodeid):
        """Read the node's content back in and reindex it if the schema
        asks for indexing."""
        source = self.exportFilename(dirname, nodeid)
        f = open(source, 'rb')
        try:
            self.db.files[self.classname, nodeid, None] = f.read()
        finally:
            f.close()
        mime_type = None
        props = self.getprops()
        if 'type' in props:
            mime_type = self.get(nodeid, 'type')
        if not mime_type:
            mime_type = self.default_mime_type
        if props['content'].indexme:
            self.db.indexer.add_text((self.classname, nodeid, 'content'),
                self.get(nodeid, 'content'), mime_type)
411 # deviation from spec - was called ItemClass
class IssueClass(Class, roundupdb.IssueClass):
    """In-memory variant of the tracker's issue class."""
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties. If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation" or "activity" property, a ValueError is raised.
        """
        if 'title' not in properties:
            properties['title'] = hyperdb.String(indexme='yes')
        if 'messages' not in properties:
            properties['messages'] = hyperdb.Multilink("msg")
        if 'files' not in properties:
            properties['files'] = hyperdb.Multilink("file")
        if 'nosy' not in properties:
            # note: journalling is turned off as it really just wastes
            # space. this behaviour may be overridden in an instance
            properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
        if 'superseder' not in properties:
            properties['superseder'] = hyperdb.Multilink(classname)
        Class.__init__(self, db, classname, **properties)
434 # vim: set et sts=4 sw=4 :