2bbddabf511c403f76805234e87ce2bf80c863e0
1 # $Id: test_memorydb.py,v 1.4 2004-11-03 01:34:21 richard Exp $
2 '''Implement an in-memory hyperdb for testing purposes.
3 '''
import os
import shutil

from roundup import configuration
from roundup import date
from roundup import hyperdb
from roundup import password
from roundup import roundupdb
from roundup import security
from roundup.backends import back_anydbm
from roundup.backends import indexer_common
from roundup.backends import indexer_dbm
from roundup.backends import sessions_dbm
from roundup.hyperdb import *
from roundup.support import ensureParentsExist
def new_config(debug=False):
    """Build a tracker CoreConfig for an in-memory test database.

    When *debug* is true the logging level is raised to DEBUG.  The
    TRACKER_WEB and MAIL_DOMAIN values are relied on by the mailgw tests,
    so they must stay exactly as given here.
    """
    config = configuration.CoreConfig()
    config.DATABASE = "db"
    config.MAIL_DOMAIN = "your.tracker.email.domain.example"
    config.TRACKER_WEB = "http://tracker.example/cgi-bin/roundup.cgi/bugs/"
    if debug:
        config.LOGGING_LEVEL = "DEBUG"
    return config
def create(journaltag, create=True, debug=False):
    """Build a memorydb Database loaded with the classic tracker template.

    The classic template's schema.py, initial_data.py and detectors from
    ../share/roundup/templates/classic are executed against the new db.
    If 'create' is true, a sample user "fred" is added; the 'User' role
    is always granted 'Email Access'.

    NOTE(review): the 'create' parameter shadows this function's own name
    inside the body — harmless here, but easy to trip over.
    """
    db = Database(new_config(debug), journaltag)

    # load standard schema
    schema = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/schema.py')
    # schema.py expects this module's globals (Class, FileClass,
    # IssueClass, ...) plus 'db' in its execution namespace
    vars = dict(globals())
    vars['db'] = db
    execfile(schema, vars)
    initial_data = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/initial_data.py')
    vars = dict(db=db, admin_email='admin@test.com',
        adminpw=password.Password('sekrit'))
    execfile(initial_data, vars)

    # load standard detectors; each detector module registers itself
    # through its init(db) hook
    dirname = os.path.join(os.path.dirname(__file__),
        '../share/roundup/templates/classic/detectors')
    for fn in os.listdir(dirname):
        if not fn.endswith('.py'): continue
        vars = {}
        execfile(os.path.join(dirname, fn), vars)
        vars['init'](db)

    # The old hand-rolled test schema, kept below as an inert string
    # literal for reference only.
    '''
    status = Class(db, "status", name=String())
    status.setkey("name")
    priority = Class(db, "priority", name=String(), order=String())
    priority.setkey("name")
    keyword = Class(db, "keyword", name=String(), order=String())
    keyword.setkey("name")
    user = Class(db, "user", username=String(), password=Password(),
        assignable=Boolean(), age=Number(), roles=String(), address=String(),
        supervisor=Link('user'),realname=String(),alternate_addresses=String())
    user.setkey("username")
    file = FileClass(db, "file", name=String(), type=String(),
        comment=String(indexme="yes"), fooz=Password())
    file_nidx = FileClass(db, "file_nidx", content=String(indexme='no'))
    issue = IssueClass(db, "issue", title=String(indexme="yes"),
        status=Link("status"), nosy=Multilink("user"), deadline=Date(),
        foo=Interval(), files=Multilink("file"), assignedto=Link('user'),
        priority=Link('priority'), spam=Multilink('msg'),
        feedback=Link('msg'))
    stuff = Class(db, "stuff", stuff=String())
    session = Class(db, 'session', title=String())
    msg = FileClass(db, "msg", date=Date(),
        author=Link("user", do_journal='no'),
        files=Multilink('file'), inreplyto=String(),
        messageid=String(), summary=String(),
        content=String(),
        recipients=Multilink("user", do_journal='no')
        )
    '''
    if create:
        db.user.create(username="fred", roles='User',
            password=password.Password('sekrit'), address='fred@example.com')

    db.security.addPermissionToRole('User', 'Email Access')
    # Extra permission grants — also an inert string literal, kept for
    # reference.
    '''
    db.security.addPermission(name='Register', klass='user')
    db.security.addPermissionToRole('User', 'Web Access')
    db.security.addPermissionToRole('Anonymous', 'Email Access')
    db.security.addPermissionToRole('Anonymous', 'Register', 'user')
    for cl in 'issue', 'file', 'msg', 'keyword':
        db.security.addPermissionToRole('User', 'View', cl)
        db.security.addPermissionToRole('User', 'Edit', cl)
        db.security.addPermissionToRole('User', 'Create', cl)
    for cl in 'priority', 'status':
        db.security.addPermissionToRole('User', 'View', cl)
    '''
    return db
class cldb(dict):
    """Per-class node store: an ordinary dict with a no-op close().

    The anydbm backend expects the class db to be closable; in memory
    there is nothing to release.
    """

    def close(self):
        """No resources to release for an in-memory store."""
        pass
class BasicDatabase(dict):
    """In-memory stand-in for an anydbm-backed session/OTK store.

    Keys are id strings; each maps to a plain dict of values.  Looking
    up an unknown id transparently creates an empty record for it.
    """
    def __getitem__(self, key):
        # EAFP form of the original lookup: create the record on first
        # access, otherwise defer to the plain dict lookup.
        try:
            return super(BasicDatabase, self).__getitem__(key)
        except KeyError:
            fresh = {}
            self[key] = fresh
            return fresh

    def exists(self, infoid):
        """True when a record for *infoid* has been created."""
        return infoid in self

    def get(self, infoid, value, default=None):
        """Return one value from *infoid*'s record, or *default*."""
        return self[infoid].get(value, default)

    def getall(self, infoid):
        """Return the whole record dict for *infoid*."""
        return self[infoid]

    def set(self, infoid, **newvalues):
        """Merge *newvalues* into *infoid*'s record."""
        self[infoid].update(newvalues)

    def list(self):
        """All known record ids."""
        return self.keys()

    def destroy(self, infoid):
        """Drop *infoid*'s record entirely."""
        del self[infoid]

    # Persistence-related hooks are all no-ops for an in-memory store.
    def commit(self):
        pass

    def close(self):
        pass

    def updateTimestamp(self, sessid):
        pass

    def clean(self):
        pass
class Sessions(BasicDatabase, sessions_dbm.Sessions):
    """In-memory session store; data handling comes from BasicDatabase."""
    # storage name used by the sessions machinery
    name = 'sessions'
class OneTimeKeys(BasicDatabase, sessions_dbm.Sessions):
    """In-memory one-time-key store; data handling comes from BasicDatabase.

    NOTE(review): this inherits sessions_dbm.Sessions — possibly intended
    to be sessions_dbm.OneTimeKeys; confirm against the sessions_dbm API.
    """
    # storage name used by the sessions machinery
    name = 'otks'
class Indexer(indexer_dbm.Indexer):
    """Full-text indexer that keeps its word/file maps purely in memory."""
    def __init__(self, db):
        # NOTE(review): deliberately calls indexer_common.Indexer.__init__
        # rather than indexer_dbm's — presumably to skip the dbm file
        # setup of the direct base class; confirm.
        indexer_common.Indexer.__init__(self, db)
        self.reindex = 0
        self.quiet = 9
        self.changed = 0

    def load_index(self, reload=0, wordlist=None):
        # Unless reload is indicated, do not load twice
        if self.index_loaded() and not reload:
            return 0
        # start from empty maps; '_TOP' mirrors the parent indexer's
        # sentinel entry.  (Falls off the end returning None here vs 0
        # above — inconsistent; presumably callers ignore the value.)
        self.words = {}
        self.files = {'_TOP':(0,None)}
        self.fileids = {}
        self.changed = 0

    def save_index(self):
        # nothing to persist for the in-memory index
        pass

    def force_reindex(self):
        # TODO I'm concerned that force_reindex may not be tested by
        # testForcedReindexing if the functionality can just be removed
        pass
class Database(back_anydbm.Database):
    """A database for storing records containing flexible data types.

    All state (nodes, journals, file content, ids) is held in dicts on
    the instance, so nothing survives once the instance is discarded —
    this backend exists purely so the test suite can exercise hyperdb
    semantics without touching disk.

    Transaction stuff TODO:

    - check the timestamp of the class file and nuke the cache if it's
      modified. Do some sort of conflict checking on the dirty stuff.
    - perhaps detect write collisions (related to above)?
    """
    def __init__(self, config, journaltag=None):
        self.config, self.journaltag = config, journaltag
        self.classes = {}   # classname -> Class instance
        self.items = {}     # classname -> cldb of nodes
        self.ids = {}       # classname -> highest allocated id (int)
        self.journals = {}  # classname -> nodeid -> list of entries
        self.files = {}     # (classname, nodeid, property) -> content
        self.tx_files = {}  # as .files, but not yet committed
        self.security = security.Security(self)
        self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
            'filtering': 0}
        self.sessions = Sessions()
        self.otks = OneTimeKeys()
        self.indexer = Indexer(self)

        # anydbm bits
        self.cache = {}         # cache of nodes loaded or created
        self.dirtynodes = {}    # keep track of the dirty nodes by class
        self.newnodes = {}      # keep track of the new nodes by class
        self.destroyednodes = {}# keep track of the destroyed nodes by class
        self.transactions = []

    def filename(self, classname, nodeid, property=None, create=0):
        """Return a real on-disk filename for a stored file.

        File content actually lives in memory, so a dummy file (a copy
        of this module) is produced to satisfy callers that want a path.
        """
        shutil.copyfile(__file__, __file__+'.dummy')
        return __file__+'.dummy'

    def filesize(self, classname, nodeid, property=None, create=0):
        """Size in bytes of the stored file content."""
        return len(self.getfile(classname, nodeid, property))

    def post_init(self):
        # no post-schema fixups needed for the in-memory backend
        pass

    def refresh_database(self):
        # nothing on disk to refresh
        pass

    def getSessionManager(self):
        return self.sessions

    def getOTKManager(self):
        return self.otks

    def reindex(self, classname=None, show_progress=False):
        # the in-memory indexer is always up to date
        pass

    def __repr__(self):
        return '<memorydb instance at %x>'%id(self)

    def storefile(self, classname, nodeid, property, content):
        """Stash file *content* and queue its commit on the transaction log."""
        self.tx_files[classname, nodeid, property] = content
        self.transactions.append((self.doStoreFile, (classname, nodeid,
            property)))

    def getfile(self, classname, nodeid, property):
        """Fetch file content, preferring uncommitted transaction data."""
        if (classname, nodeid, property) in self.tx_files:
            return self.tx_files[classname, nodeid, property]
        return self.files[classname, nodeid, property]

    def doStoreFile(self, classname, nodeid, property, **databases):
        # commit hook: move pending transaction content into .files
        self.files[classname, nodeid, property] = self.tx_files[classname, nodeid, property]
        return (classname, nodeid)

    def rollbackStoreFile(self, classname, nodeid, property, **databases):
        # rollback hook: discard the uncommitted content
        del self.tx_files[classname, nodeid, property]

    def numfiles(self):
        """Total number of files, committed plus pending."""
        return len(self.files) + len(self.tx_files)

    def close(self):
        self.clearCache()
        self.tx_files = {}
        # kill the schema too
        self.classes = {}
        # just keep the .items

    #
    # Classes
    #
    def __getattr__(self, classname):
        """A convenient way of calling self.getclass(classname)."""
        if self.classes.has_key(classname):
            return self.classes[classname]
        raise AttributeError, classname

    def addclass(self, cl):
        """Register hyperdb class *cl* and add its default permissions.

        Raises ValueError if a class of the same name already exists.
        """
        cn = cl.classname
        if self.classes.has_key(cn):
            raise ValueError, cn
        self.classes[cn] = cl
        # keep existing item data if the schema is being re-applied
        if cn not in self.items:
            self.items[cn] = cldb()
            self.ids[cn] = 0

        # add default Edit and View permissions
        self.security.addPermission(name="Create", klass=cn,
            description="User is allowed to create "+cn)
        self.security.addPermission(name="Edit", klass=cn,
            description="User is allowed to edit "+cn)
        self.security.addPermission(name="View", klass=cn,
            description="User is allowed to access "+cn)

    def getclasses(self):
        """Return a list of the names of all existing classes."""
        l = self.classes.keys()
        l.sort()
        return l

    def getclass(self, classname):
        """Get the Class object representing a particular class.

        If 'classname' is not a valid class name, a KeyError is raised.
        """
        try:
            return self.classes[classname]
        except KeyError:
            raise KeyError, 'There is no class called "%s"'%classname

    #
    # Class DBs
    #
    def clear(self):
        # drop all node data (schema, journals and files are kept)
        self.items = {}

    def getclassdb(self, classname, mode='r'):
        """ grab a connection to the class db that will be used for
            multiple actions
        """
        return self.items[classname]

    def getCachedJournalDB(self, classname):
        return self.journals.setdefault(classname, {})

    #
    # Node IDs
    #
    def newid(self, classname):
        # ids are allocated sequentially per class and handed out as strings
        self.ids[classname] += 1
        return str(self.ids[classname])

    def setid(self, classname, id):
        self.ids[classname] = int(id)

    #
    # Journal
    #
    def doSaveJournal(self, classname, nodeid, action, params, creator,
            creation):
        """Append one journal entry, defaulting creator/creation if unset."""
        if creator is None:
            creator = self.getuid()
        if creation is None:
            creation = date.Date()
        self.journals.setdefault(classname, {}).setdefault(nodeid,
            []).append((nodeid, creation, creator, action, params))

    def doSetJournal(self, classname, nodeid, journal):
        """Replace the whole journal for (classname, nodeid)."""
        self.journals.setdefault(classname, {})[nodeid] = journal

    def getjournal(self, classname, nodeid):
        """Return the journal for a node, including uncommitted entries.

        Raises IndexError when the node has no journal at all.
        """
        # our journal result
        res = []

        # add any journal entries for transactions not committed to the
        # database
        for method, args in self.transactions:
            if method != self.doSaveJournal:
                continue
            (cache_classname, cache_nodeid, cache_action, cache_params,
                cache_creator, cache_creation) = args
            if cache_classname == classname and cache_nodeid == nodeid:
                if not cache_creator:
                    cache_creator = self.getuid()
                if not cache_creation:
                    cache_creation = date.Date()
                res.append((cache_nodeid, cache_creation, cache_creator,
                    cache_action, cache_params))
        try:
            res += self.journals.get(classname, {})[nodeid]
        except KeyError:
            if res: return res
            raise IndexError, nodeid
        return res

    def pack(self, pack_before):
        """ Delete all journal entries except "create" before 'pack_before'.
        """
        pack_before = pack_before.serialise()
        for classname in self.journals:
            db = self.journals[classname]
            for key in db:
                # get the journal for this db entry
                l = []
                last_set_entry = None   # NOTE(review): never used — dead?
                for entry in db[key]:
                    # unpack the entry
                    # NOTE(review): unpacking into self.journaltag clobbers
                    # the database's journaltag with the entry's creator —
                    # looks accidental; confirm before relying on
                    # journaltag after pack()
                    (nodeid, date_stamp, self.journaltag, action,
                        params) = entry
                    date_stamp = date_stamp.serialise()
                    # if the entry is after the pack date, _or_ the initial
                    # create entry, then it stays
                    if date_stamp > pack_before or action == 'create':
                        l.append(entry)
                db[key] = l
class Class(back_anydbm.Class):
    """Plain hyperdb class; memorydb reuses the anydbm implementation as-is."""
    pass
class FileClass(back_anydbm.FileClass):
    """FileClass whose content lives in the memorydb rather than on disk."""
    def __init__(self, db, classname, **properties):
        # guarantee the standard file properties exist
        if not properties.has_key('content'):
            properties['content'] = hyperdb.String(indexme='yes')
        if not properties.has_key('type'):
            properties['type'] = hyperdb.String()
        # NOTE(review): initialises via back_anydbm.Class.__init__,
        # bypassing back_anydbm.FileClass.__init__ — presumably to skip
        # the latter's extra setup; confirm.
        back_anydbm.Class.__init__(self, db, classname, **properties)

    def export_files(self, dirname, nodeid):
        """Write this node's raw content out to its export filename."""
        dest = self.exportFilename(dirname, nodeid)
        ensureParentsExist(dest)
        f = open(dest, 'wb')
        f.write(self.db.files[self.classname, nodeid, None])
        f.close()

    def import_files(self, dirname, nodeid):
        """Read content back from an export tree and re-index it."""
        source = self.exportFilename(dirname, nodeid)
        f = open(source, 'rb')
        self.db.files[self.classname, nodeid, None] = f.read()
        f.close()
        # work out a mime type for indexing, falling back to the default
        mime_type = None
        props = self.getprops()
        if props.has_key('type'):
            mime_type = self.get(nodeid, 'type')
        if not mime_type:
            mime_type = self.default_mime_type
        if props['content'].indexme:
            self.db.indexer.add_text((self.classname, nodeid, 'content'),
                self.get(nodeid, 'content'), mime_type)
# deviation from spec - was called ItemClass
class IssueClass(Class, roundupdb.IssueClass):
    # Overridden methods:
    def __init__(self, db, classname, **properties):
        """The newly-created class automatically includes the "messages",
        "files", "nosy", and "superseder" properties. If the 'properties'
        dictionary attempts to specify any of these properties or a
        "creation" or "activity" property, a ValueError is raised.

        NOTE(review): no ValueError check is visible in this body —
        presumably the base Class.__init__ enforces it; confirm.
        """
        # fill in any of the standard issue properties the caller omitted
        if not properties.has_key('title'):
            properties['title'] = hyperdb.String(indexme='yes')
        if not properties.has_key('messages'):
            properties['messages'] = hyperdb.Multilink("msg")
        if not properties.has_key('files'):
            properties['files'] = hyperdb.Multilink("file")
        if not properties.has_key('nosy'):
            # note: journalling is turned off as it really just wastes
            # space. this behaviour may be overridden in an instance
            properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
        if not properties.has_key('superseder'):
            properties['superseder'] = hyperdb.Multilink(classname)
        Class.__init__(self, db, classname, **properties)
432 # vim: set et sts=4 sw=4 :