1 # $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $
2 '''Implement an in-memory hyperdb for testing purposes.
3 '''
import os
import shutil

from roundup import hyperdb
from roundup import roundupdb
from roundup import security
from roundup import password
from roundup import configuration
from roundup.backends import back_anydbm
from roundup.backends import indexer_dbm
from roundup.backends import sessions_dbm
from roundup.backends import indexer_common
from roundup.hyperdb import *
from roundup.support import ensureParentsExist
def new_config():
    """Build and return a CoreConfig suitable for the in-memory test db."""
    cfg = configuration.CoreConfig()
    cfg.DATABASE = "db"
    #cfg.logging = MockNull()
    # the mailgw tests rely on these TRACKER_WEB and MAIL_DOMAIN settings
    cfg.MAIL_DOMAIN = "your.tracker.email.domain.example"
    cfg.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
    return cfg
def create(journaltag, create=True):
    """Create an in-memory Database loaded with the classic tracker schema.

    'journaltag' is the user the database will act as.  The standard
    classic-template schema, initial data and detectors are executed
    against the new database; if 'create' is true, a test user "fred"
    is also created.  Returns the ready-to-use Database.
    """
    db = Database(new_config(), journaltag)

    # load standard schema
    schema = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/schema.py')
    # the schema script expects this module's globals plus 'db' in scope
    vars = dict(globals())
    vars['db'] = db
    execfile(schema, vars)
    initial_data = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/initial_data.py')
    vars = dict(db=db, admin_email='admin@test.com',
        adminpw=password.Password('sekrit'))
    execfile(initial_data, vars)

    # load standard detectors
    dirname = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'): continue
        vars = {}
        execfile(os.path.join(dirname, fn), vars)
        # each detector module exposes an init(db) registration hook
        vars['init'](db)

    # NOTE: the following triple-quoted block is disabled code kept for
    # reference -- an inline schema superseded by the classic template above
    '''
    status = Class(db, "status", name=String())
    status.setkey("name")
    priority = Class(db, "priority", name=String(), order=String())
    priority.setkey("name")
    keyword = Class(db, "keyword", name=String(), order=String())
    keyword.setkey("name")
    user = Class(db, "user", username=String(), password=Password(),
        assignable=Boolean(), age=Number(), roles=String(), address=String(),
        supervisor=Link('user'),realname=String(),alternate_addresses=String())
    user.setkey("username")
    file = FileClass(db, "file", name=String(), type=String(),
        comment=String(indexme="yes"), fooz=Password())
    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
    issue = IssueClass(db, "issue", title=String(indexme="yes"),
        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
        priority=Link('priority'), spam=Multilink('msg'),
        feedback=Link('msg'))
    stuff = Class(db, "stuff", stuff=String())
    session = Class(db, 'session', title=String())
    msg = FileClass(db, "msg", date=Date(),
        author=Link("user", do_journal='no'),
        files=Multilink('file'), inreplyto=String(),
        messageid=String(), summary=String(),
        content=String(),
        recipients=Multilink("user", do_journal='no')
        )
    '''
    if create:
        db.user.create(username="fred", roles='User',
            password=password.Password('sekrit'), address='fred@example.com')

    db.security.addPermissionToRole('User', 'Email Access')
    # NOTE: disabled permission setup kept for reference
    '''
    db.security.addPermission(name='Register', klass='user')
    db.security.addPermissionToRole('User', 'Web Access')
    db.security.addPermissionToRole('Anonymous', 'Email Access')
    db.security.addPermissionToRole('Anonymous', 'Register', 'user')
    for cl in 'issue', 'file', 'msg', 'keyword':
        db.security.addPermissionToRole('User', 'View', cl)
        db.security.addPermissionToRole('User', 'Edit', cl)
        db.security.addPermissionToRole('User', 'Create', cl)
    for cl in 'priority', 'status':
        db.security.addPermissionToRole('User', 'View', cl)
    '''
    return db
class cldb(dict):
    """Dictionary standing in for a per-class anydbm store."""
    def close(self):
        # the in-memory store holds no resources to release
        pass
class BasicDatabase(dict):
    ''' Provide a nice encapsulation of an anydbm store.

        Keys are id strings, values are automatically marshalled data.
    '''
    def __getitem__(self, key):
        # auto-vivify: a lookup of an unknown id creates an empty record
        try:
            return super(BasicDatabase, self).__getitem__(key)
        except KeyError:
            fresh = {}
            self[key] = fresh
            return fresh
    def exists(self, infoid):
        """True when a record exists for 'infoid' (no auto-creation)."""
        return infoid in self
    def get(self, infoid, value, default=None):
        """Return one field of the record, or 'default' when absent."""
        record = self[infoid]
        return record.get(value, default)
    def getall(self, infoid):
        """Return the whole record for 'infoid'."""
        return self[infoid]
    def set(self, infoid, **newvalues):
        """Merge keyword values into the record for 'infoid'."""
        self[infoid].update(newvalues)
    def list(self):
        """Return all known record ids."""
        return self.keys()
    def destroy(self, infoid):
        """Remove the record for 'infoid' entirely."""
        del self[infoid]
    # the remaining operations are persistence hooks; the in-memory
    # store needs none of them
    def commit(self):
        pass
    def close(self):
        pass
    def updateTimestamp(self, sessid):
        pass
    def clean(self):
        pass
class Sessions(BasicDatabase, sessions_dbm.Sessions):
    # in-memory web-session store: BasicDatabase provides the storage,
    # the sessions_dbm base supplies the rest of the session interface
    name = 'sessions'
class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
    # one-time-key store
    # NOTE(review): inherits sessions_dbm.Sessions rather than a dedicated
    # one-time-key base class -- presumably adequate for tests, but confirm
    # against sessions_dbm's API
    name = 'otks'
class Indexer(indexer_dbm.Indexer):
    """Test indexer whose word index lives purely in memory."""
    def __init__(self, db):
        indexer_common.Indexer.__init__(self, db)
        self.changed = 0
        self.reindex = 0
        self.quiet = 9

    def load_index(self, reload=0, wordlist=None):
        """(Re)initialise the in-memory index structures.

        An already-loaded index is left untouched unless 'reload' is set.
        """
        if self.index_loaded() and not reload:
            return 0
        self.words = {}
        self.files = {'_TOP': (0, None)}
        self.fileids = {}
        self.changed = 0

    def save_index(self):
        # nothing is persisted for the in-memory index
        pass

    def force_reindex(self):
        # TODO I'm concerned that force_reindex may not be tested by
        # testForcedReindexing if the functionality can just be removed
        pass
class Database(back_anydbm.Database):
    """A database for storing records containing flexible data types.

    In-memory variant used by the tests: nodes, journals and file content
    are all held in plain dictionaries, so nothing persists to disk
    (except the dummy file produced by filename()).

    Transaction stuff TODO:

    - check the timestamp of the class file and nuke the cache if it's
      modified. Do some sort of conflict checking on the dirty stuff.
    - perhaps detect write collisions (related to above)?
    """
    def __init__(self, config, journaltag=None):
        self.config, self.journaltag = config, journaltag
        self.classes = {}       # classname -> Class instance
        self.items = {}         # classname -> cldb of nodes
        self.ids = {}           # classname -> last allocated id (int)
        self.journals = {}      # classname -> nodeid -> journal entries
        self.files = {}         # committed file content
        self.tx_files = {}      # file content pending commit
        self.security = security.Security(self)
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}
        # fix: the original assigned self.sessions twice; once is enough
        self.sessions = Sessions()
        self.otks = OneTimeKeys()
        self.indexer = Indexer(self)

        # anydbm bits
        self.cache = {}         # cache of nodes loaded or created
        self.dirtynodes = {}    # keep track of the dirty nodes by class
        self.newnodes = {}      # keep track of the new nodes by class
        self.destroyednodes = {}# keep track of the destroyed nodes by class
        self.transactions = []

    def filename(self, classname, nodeid, property=None, create=0):
        """Return a filesystem path holding the node's content.

        There are no real content files in the in-memory db, so a copy of
        this source file is used as a stand-in.
        """
        shutil.copyfile(__file__, __file__+'.dummy')
        return __file__+'.dummy'

    def filesize(self, classname, nodeid, property=None, create=0):
        """Return the length of the node's stored content."""
        return len(self.getfile(classname, nodeid, property))

    def post_init(self):
        # no post-open fixups needed for the in-memory store
        pass

    def refresh_database(self):
        # nothing persistent to refresh
        pass

    def getSessionManager(self):
        return self.sessions

    def getOTKManager(self):
        return self.otks

    def reindex(self, classname=None, show_progress=False):
        # the test Indexer keeps no persistent index to rebuild
        pass

    def __repr__(self):
        return '<memorydb instance at %x>'%id(self)

    def storefile(self, classname, nodeid, property, content):
        """Queue 'content' for storage; doStoreFile commits it."""
        self.tx_files[classname, nodeid, property] = content
        self.transactions.append((self.doStoreFile, (classname, nodeid,
            property)))

    def getfile(self, classname, nodeid, property):
        """Return file content, preferring uncommitted transaction data."""
        if (classname, nodeid, property) in self.tx_files:
            return self.tx_files[classname, nodeid, property]
        return self.files[classname, nodeid, property]

    def doStoreFile(self, classname, nodeid, property, **databases):
        """Commit previously-queued file content to permanent storage."""
        self.files[classname, nodeid, property] = \
            self.tx_files[classname, nodeid, property]
        return (classname, nodeid)

    def rollbackStoreFile(self, classname, nodeid, property, **databases):
        """Discard queued (uncommitted) file content."""
        del self.tx_files[classname, nodeid, property]

    def numfiles(self):
        """Count all stored files, committed plus pending."""
        return len(self.files) + len(self.tx_files)

    def close(self):
        self.clearCache()
        self.tx_files = {}
        # kill the schema too
        self.classes = {}
        # just keep the .items

    #
    # Classes
    #
    def __getattr__(self, classname):
        """A convenient way of calling self.getclass(classname)."""
        # 'in' instead of has_key: works on Python 2 and 3 alike
        if classname in self.classes:
            return self.classes[classname]
        raise AttributeError(classname)

    def addclass(self, cl):
        """Register Class 'cl', creating its node store, id counter and
        default Create/Edit/View permissions.  Raises ValueError if a
        class of the same name is already registered."""
        cn = cl.classname
        if cn in self.classes:
            raise ValueError(cn)
        self.classes[cn] = cl
        if cn not in self.items:
            self.items[cn] = cldb()
            self.ids[cn] = 0

        # add default Edit and View permissions
        self.security.addPermission(name="Create", klass=cn,
            description="User is allowed to create "+cn)
        self.security.addPermission(name="Edit", klass=cn,
            description="User is allowed to edit "+cn)
        self.security.addPermission(name="View", klass=cn,
            description="User is allowed to access "+cn)

    def getclasses(self):
        """Return a list of the names of all existing classes."""
        # list() + sort() (not sorted()) keeps compatibility with both
        # old Python 2 and Python 3 dict key views
        l = list(self.classes.keys())
        l.sort()
        return l

    def getclass(self, classname):
        """Get the Class object representing a particular class.

        If 'classname' is not a valid class name, a KeyError is raised.
        """
        try:
            return self.classes[classname]
        except KeyError:
            raise KeyError('There is no class called "%s"'%classname)

    #
    # Class DBs
    #
    def clear(self):
        """Delete all node data (journals and schema are untouched)."""
        self.items = {}

    def getclassdb(self, classname, mode='r'):
        """ grab a connection to the class db that will be used for
            multiple actions
        """
        return self.items[classname]

    def getCachedJournalDB(self, classname):
        """Return (creating on demand) the journal dict for 'classname'."""
        return self.journals.setdefault(classname, {})

    #
    # Node IDs
    #
    def newid(self, classname):
        """Allocate and return (as a string) the next node id."""
        self.ids[classname] += 1
        return str(self.ids[classname])

    def setid(self, classname, id):
        """Set the id counter so subsequent ids are allocated above 'id'."""
        self.ids[classname] = int(id)

    #
    # Journal
    #
    def doSaveJournal(self, classname, nodeid, action, params, creator,
            creation):
        """Append a journal entry, defaulting creator and creation time."""
        if creator is None:
            creator = self.getuid()
        if creation is None:
            creation = date.Date()
        self.journals.setdefault(classname, {}).setdefault(nodeid,
            []).append((nodeid, creation, creator, action, params))

    def doSetJournal(self, classname, nodeid, journal):
        """Replace the node's journal wholesale."""
        self.journals.setdefault(classname, {})[nodeid] = journal

    def getjournal(self, classname, nodeid):
        """Return the node's journal, including uncommitted entries.

        Raises IndexError when the node has no journal at all.
        """
        # our journal result
        res = []

        # add any journal entries for transactions not committed to the
        # database
        for method, args in self.transactions:
            if method != self.doSaveJournal:
                continue
            (cache_classname, cache_nodeid, cache_action, cache_params,
                cache_creator, cache_creation) = args
            if cache_classname == classname and cache_nodeid == nodeid:
                if not cache_creator:
                    cache_creator = self.getuid()
                if not cache_creation:
                    cache_creation = date.Date()
                res.append((cache_nodeid, cache_creation, cache_creator,
                    cache_action, cache_params))
        try:
            res += self.journals.get(classname, {})[nodeid]
        except KeyError:
            if res: return res
            raise IndexError(nodeid)
        return res

    def pack(self, pack_before):
        """ Delete all journal entries except "create" before 'pack_before'.
        """
        pack_before = pack_before.serialise()
        for classname in self.journals:
            db = self.journals[classname]
            for key in db:
                # get the journal for this db entry
                l = []
                for entry in db[key]:
                    # unpack the entry into locals; the original unpacked
                    # the creator field into self.journaltag, clobbering
                    # the database's journal tag as a side effect
                    (nodeid, date_stamp, journaltag, action,
                        params) = entry
                    date_stamp = date_stamp.serialise()
                    # if the entry is after the pack date, _or_ the initial
                    # create entry, then it stays
                    if date_stamp > pack_before or action == 'create':
                        l.append(entry)
                db[key] = l
class Class(back_anydbm.Class):
    # no overrides: the anydbm Class implementation is used as-is against
    # the in-memory Database
    pass
class FileClass(back_anydbm.FileClass):
    """FileClass whose content lives in the in-memory database.

    Guarantees the conventional "content" and "type" properties exist.
    """
    def __init__(self, db, classname, **properties):
        # 'in' instead of dict.has_key: valid on Python 2 and 3
        if 'content' not in properties:
            properties['content'] = hyperdb.String(indexme='yes')
        if 'type' not in properties:
            properties['type'] = hyperdb.String()
        back_anydbm.Class.__init__(self, db, classname, **properties)

    def export_files(self, dirname, nodeid):
        """Write the node's stored content to its export location."""
        dest = self.exportFilename(dirname, nodeid)
        ensureParentsExist(dest)
        f = open(dest, 'wb')
        # try/finally so the handle is closed even if the write fails
        try:
            f.write(self.db.files[self.classname, nodeid, None])
        finally:
            f.close()

    def import_files(self, dirname, nodeid):
        """Read the node's content from its export location, then feed it
        to the indexer when the schema asks for content indexing."""
        source = self.exportFilename(dirname, nodeid)
        f = open(source, 'rb')
        try:
            self.db.files[self.classname, nodeid, None] = f.read()
        finally:
            f.close()
        mime_type = None
        props = self.getprops()
        if 'type' in props:
            mime_type = self.get(nodeid, 'type')
        if not mime_type:
            mime_type = self.default_mime_type
        if props['content'].indexme:
            self.db.indexer.add_text((self.classname, nodeid, 'content'),
                self.get(nodeid, 'content'), mime_type)
# deviation from spec - was called ItemClass
class IssueClass(Class, roundupdb.IssueClass):
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties. If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation" or "activity" property, a ValueError is raised.
        """
        # 'in' instead of dict.has_key: valid on Python 2 and 3
        if 'title' not in properties:
            properties['title'] = hyperdb.String(indexme='yes')
        if 'messages' not in properties:
            properties['messages'] = hyperdb.Multilink("msg")
        if 'files' not in properties:
            properties['files'] = hyperdb.Multilink("file")
        if 'nosy' not in properties:
            # note: journalling is turned off as it really just wastes
            # space. this behaviour may be overridden in an instance
            properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
        if 'superseder' not in properties:
            properties['superseder'] = hyperdb.Multilink(classname)
        Class.__init__(self, db, classname, **properties)
432 # vim: set et sts=4 sw=4 :