1 # $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $
2 '''Implement an in-memory hyperdb for testing purposes.
3 '''
import os
import shutil

from roundup import hyperdb
from roundup import roundupdb
from roundup import security
from roundup import password
from roundup import configuration
from roundup.backends import back_anydbm
from roundup.backends import indexer_dbm
from roundup.backends import sessions_dbm
from roundup.backends import indexer_common
from roundup.hyperdb import *
from roundup.support import ensureParentsExist
19 def new_config():
20 config = configuration.CoreConfig()
21 config.DATABASE = "db"
22 #config.logging = MockNull()
23 # these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
24 config.MAIL_DOMAIN = "your.tracker.email.domain.example"
25 config.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
26 return config
28 def create(journaltag, create=True):
29 db = Database(new_config(), journaltag)
31 # load standard schema
32 schema = os.path.join(os.path.dirname(__file__),
33 '../share/roundup/templates/classic/schema.py')
34 vars = dict(globals())
35 vars['db'] = db
36 execfile(schema, vars)
37 initial_data = os.path.join(os.path.dirname(__file__),
38 '../share/roundup/templates/classic/initial_data.py')
39 vars = dict(db=db, admin_email='admin@test.com',
40 adminpw=password.Password('sekrit'))
41 execfile(initial_data, vars)
43 # load standard detectors
44 dirname = os.path.join(os.path.dirname(__file__),
45 '../share/roundup/templates/classic/detectors')
46 for fn in os.listdir(dirname):
47 if not fn.endswith('.py'): continue
48 vars = {}
49 execfile(os.path.join(dirname, fn), vars)
50 vars['init'](db)
52 '''
53 status = Class(db, "status", name=String())
54 status.setkey("name")
55 priority = Class(db, "priority", name=String(), order=String())
56 priority.setkey("name")
57 keyword = Class(db, "keyword", name=String(), order=String())
58 keyword.setkey("name")
59 user = Class(db, "user", username=String(), password=Password(),
60 assignable=Boolean(), age=Number(), roles=String(), address=String(),
61 supervisor=Link('user'),realname=String(),alternate_addresses=String())
62 user.setkey("username")
63 file = FileClass(db, "file", name=String(), type=String(),
64 comment=String(indexme="yes"), fooz=Password())
65 file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
66 issue = IssueClass(db, "issue", title=String(indexme="yes"),
67 status=Link("status"), nosy=Multilink("user"), deadline=Date(),
68 foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
69 priority=Link('priority'), spam=Multilink('msg'),
70 feedback=Link('msg'))
71 stuff = Class(db, "stuff", stuff=String())
72 session = Class(db, 'session', title=String())
73 msg = FileClass(db, "msg", date=Date(),
74 author=Link("user", do_journal='no'),
75 files=Multilink('file'), inreplyto=String(),
76 messageid=String(), summary=String(),
77 content=String(),
78 recipients=Multilink("user", do_journal='no')
79 )
80 '''
81 if create:
82 db.user.create(username="fred", roles='User',
83 password=password.Password('sekrit'), address='fred@example.com')
85 db.security.addPermissionToRole('User', 'Email Access')
86 '''
87 db.security.addPermission(name='Register', klass='user')
88 db.security.addPermissionToRole('User', 'Web Access')
89 db.security.addPermissionToRole('Anonymous', 'Email Access')
90 db.security.addPermissionToRole('Anonymous', 'Register', 'user')
91 for cl in 'issue', 'file', 'msg', 'keyword':
92 db.security.addPermissionToRole('User', 'View', cl)
93 db.security.addPermissionToRole('User', 'Edit', cl)
94 db.security.addPermissionToRole('User', 'Create', cl)
95 for cl in 'priority', 'status':
96 db.security.addPermissionToRole('User', 'View', cl)
97 '''
98 return db
100 class cldb(dict):
101 def close(self):
102 pass
104 class BasicDatabase(dict):
105 ''' Provide a nice encapsulation of an anydbm store.
107 Keys are id strings, values are automatically marshalled data.
108 '''
109 def __getitem__(self, key):
110 if key not in self:
111 d = self[key] = {}
112 return d
113 return super(BasicDatabase, self).__getitem__(key)
114 def exists(self, infoid):
115 return infoid in self
116 def get(self, infoid, value, default=None):
117 return self[infoid].get(value, default)
118 def getall(self, infoid):
119 return self[infoid]
120 def set(self, infoid, **newvalues):
121 self[infoid].update(newvalues)
122 def list(self):
123 return self.keys()
124 def destroy(self, infoid):
125 del self[infoid]
126 def commit(self):
127 pass
128 def close(self):
129 pass
130 def updateTimestamp(self, sessid):
131 pass
132 def clean(self):
133 pass
135 class Sessions(BasicDatabase, sessions_dbm.Sessions):
136 name = 'sessions'
138 class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
139 name = 'otks'
141 class Indexer(indexer_dbm.Indexer):
142 def __init__(self, db):
143 indexer_common.Indexer.__init__(self, db)
144 self.reindex = 0
145 self.quiet = 9
146 self.changed = 0
148 def load_index(self, reload=0, wordlist=None):
149 # Unless reload is indicated, do not load twice
150 if self.index_loaded() and not reload:
151 return 0
152 self.words = {}
153 self.files = {'_TOP':(0,None)}
154 self.fileids = {}
155 self.changed = 0
157 def save_index(self):
158 pass
159 def force_reindex(self):
160 # TODO I'm concerned that force_reindex may not be tested by
161 # testForcedReindexing if the functionality can just be removed
162 pass
164 class Database(back_anydbm.Database):
165 """A database for storing records containing flexible data types.
167 Transaction stuff TODO:
169 - check the timestamp of the class file and nuke the cache if it's
170 modified. Do some sort of conflict checking on the dirty stuff.
171 - perhaps detect write collisions (related to above)?
172 """
173 def __init__(self, config, journaltag=None):
174 self.config, self.journaltag = config, journaltag
175 self.classes = {}
176 self.items = {}
177 self.ids = {}
178 self.journals = {}
179 self.files = {}
180 self.tx_files = {}
181 self.security = security.Security(self)
182 self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
183 'filtering': 0}
184 self.sessions = Sessions()
185 self.otks = OneTimeKeys()
186 self.indexer = Indexer(self)
188 # anydbm bits
189 self.cache = {} # cache of nodes loaded or created
190 self.dirtynodes = {} # keep track of the dirty nodes by class
191 self.newnodes = {} # keep track of the new nodes by class
192 self.destroyednodes = {}# keep track of the destroyed nodes by class
193 self.transactions = []
195 def filename(self, classname, nodeid, property=None, create=0):
196 shutil.copyfile(__file__, __file__+'.dummy')
197 return __file__+'.dummy'
199 def filesize(self, classname, nodeid, property=None, create=0):
200 return len(self.getfile(classname, nodeid, property))
202 def post_init(self):
203 pass
205 def refresh_database(self):
206 pass
208 def getSessionManager(self):
209 return self.sessions
211 def getOTKManager(self):
212 return self.otks
214 def reindex(self, classname=None, show_progress=False):
215 pass
217 def __repr__(self):
218 return '<memorydb instance at %x>'%id(self)
220 def storefile(self, classname, nodeid, property, content):
221 self.tx_files[classname, nodeid, property] = content
222 self.transactions.append((self.doStoreFile, (classname, nodeid,
223 property)))
225 def getfile(self, classname, nodeid, property):
226 if (classname, nodeid, property) in self.tx_files:
227 return self.tx_files[classname, nodeid, property]
228 return self.files[classname, nodeid, property]
230 def doStoreFile(self, classname, nodeid, property, **databases):
231 self.files[classname, nodeid, property] = self.tx_files[classname, nodeid, property]
232 return (classname, nodeid)
234 def rollbackStoreFile(self, classname, nodeid, property, **databases):
235 del self.tx_files[classname, nodeid, property]
237 def numfiles(self):
238 return len(self.files) + len(self.tx_files)
240 def close(self):
241 self.clearCache()
242 self.tx_files = {}
243 # kill the schema too
244 self.classes = {}
245 # just keep the .items
247 #
248 # Classes
249 #
250 def __getattr__(self, classname):
251 """A convenient way of calling self.getclass(classname)."""
252 if self.classes.has_key(classname):
253 return self.classes[classname]
254 raise AttributeError, classname
256 def addclass(self, cl):
257 cn = cl.classname
258 if self.classes.has_key(cn):
259 raise ValueError, cn
260 self.classes[cn] = cl
261 if cn not in self.items:
262 self.items[cn] = cldb()
263 self.ids[cn] = 0
265 # add default Edit and View permissions
266 self.security.addPermission(name="Create", klass=cn,
267 description="User is allowed to create "+cn)
268 self.security.addPermission(name="Edit", klass=cn,
269 description="User is allowed to edit "+cn)
270 self.security.addPermission(name="View", klass=cn,
271 description="User is allowed to access "+cn)
273 def getclasses(self):
274 """Return a list of the names of all existing classes."""
275 l = self.classes.keys()
276 l.sort()
277 return l
279 def getclass(self, classname):
280 """Get the Class object representing a particular class.
282 If 'classname' is not a valid class name, a KeyError is raised.
283 """
284 try:
285 return self.classes[classname]
286 except KeyError:
287 raise KeyError, 'There is no class called "%s"'%classname
289 #
290 # Class DBs
291 #
292 def clear(self):
293 self.items = {}
295 def getclassdb(self, classname, mode='r'):
296 """ grab a connection to the class db that will be used for
297 multiple actions
298 """
299 return self.items[classname]
301 def getCachedJournalDB(self, classname):
302 return self.journals.setdefault(classname, {})
304 #
305 # Node IDs
306 #
307 def newid(self, classname):
308 self.ids[classname] += 1
309 return str(self.ids[classname])
310 def setid(self, classname, id):
311 self.ids[classname] = int(id)
313 #
314 # Journal
315 #
316 def doSaveJournal(self, classname, nodeid, action, params, creator,
317 creation):
318 if creator is None:
319 creator = self.getuid()
320 if creation is None:
321 creation = date.Date()
322 self.journals.setdefault(classname, {}).setdefault(nodeid,
323 []).append((nodeid, creation, creator, action, params))
325 def doSetJournal(self, classname, nodeid, journal):
326 self.journals.setdefault(classname, {})[nodeid] = journal
328 def getjournal(self, classname, nodeid):
329 # our journal result
330 res = []
332 # add any journal entries for transactions not committed to the
333 # database
334 for method, args in self.transactions:
335 if method != self.doSaveJournal:
336 continue
337 (cache_classname, cache_nodeid, cache_action, cache_params,
338 cache_creator, cache_creation) = args
339 if cache_classname == classname and cache_nodeid == nodeid:
340 if not cache_creator:
341 cache_creator = self.getuid()
342 if not cache_creation:
343 cache_creation = date.Date()
344 res.append((cache_nodeid, cache_creation, cache_creator,
345 cache_action, cache_params))
346 try:
347 res += self.journals.get(classname, {})[nodeid]
348 except KeyError:
349 if res: return res
350 raise IndexError, nodeid
351 return res
353 def pack(self, pack_before):
354 """ Delete all journal entries except "create" before 'pack_before'.
355 """
356 pack_before = pack_before.serialise()
357 for classname in self.journals:
358 db = self.journals[classname]
359 for key in db:
360 # get the journal for this db entry
361 l = []
362 last_set_entry = None
363 for entry in db[key]:
364 # unpack the entry
365 (nodeid, date_stamp, self.journaltag, action,
366 params) = entry
367 date_stamp = date_stamp.serialise()
368 # if the entry is after the pack date, _or_ the initial
369 # create entry, then it stays
370 if date_stamp > pack_before or action == 'create':
371 l.append(entry)
372 db[key] = l
374 class Class(back_anydbm.Class):
375 pass
377 class FileClass(back_anydbm.FileClass):
378 def __init__(self, db, classname, **properties):
379 if not properties.has_key('content'):
380 properties['content'] = hyperdb.String(indexme='yes')
381 if not properties.has_key('type'):
382 properties['type'] = hyperdb.String()
383 back_anydbm.Class.__init__(self, db, classname, **properties)
385 def export_files(self, dirname, nodeid):
386 dest = self.exportFilename(dirname, nodeid)
387 ensureParentsExist(dest)
388 f = open(dest, 'wb')
389 f.write(self.db.files[self.classname, nodeid, None])
390 f.close()
392 def import_files(self, dirname, nodeid):
393 source = self.exportFilename(dirname, nodeid)
394 f = open(source, 'rb')
395 self.db.files[self.classname, nodeid, None] = f.read()
396 f.close()
397 mime_type = None
398 props = self.getprops()
399 if props.has_key('type'):
400 mime_type = self.get(nodeid, 'type')
401 if not mime_type:
402 mime_type = self.default_mime_type
403 if props['content'].indexme:
404 self.db.indexer.add_text((self.classname, nodeid, 'content'),
405 self.get(nodeid, 'content'), mime_type)
407 # deviation from spec - was called ItemClass
408 class IssueClass(Class, roundupdb.IssueClass):
409 # Overridden methods:
410 def __init__(self, db, classname, **properties):
411 """The newly-created class automatically includes the "messages",
412 "files", "nosy", and "superseder" properties. If the 'properties'
413 dictionary attempts to specify any of these properties or a
414 "creation" or "activity" property, a ValueError is raised.
415 """
416 if not properties.has_key('title'):
417 properties['title'] = hyperdb.String(indexme='yes')
418 if not properties.has_key('messages'):
419 properties['messages'] = hyperdb.Multilink("msg")
420 if not properties.has_key('files'):
421 properties['files'] = hyperdb.Multilink("file")
422 if not properties.has_key('nosy'):
423 # note: journalling is turned off as it really just wastes
424 # space. this behaviour may be overridden in an instance
425 properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
426 if not properties.has_key('superseder'):
427 properties['superseder'] = hyperdb.Multilink(classname)
428 Class.__init__(self, db, classname, **properties)
430 # vim: set et sts=4 sw=4 :