From: richard Date: Mon, 22 Feb 2010 05:26:57 +0000 (+0000) Subject: Beginnings of a big code cleanup / modernisation to make 2to3 happy X-Git-Url: https://git.tokkee.org/?a=commitdiff_plain;h=430ffee2115c6aef9dd701134686d28168fd4123;p=roundup.git Beginnings of a big code cleanup / modernisation to make 2to3 happy git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk@4463 57a73879-2fb5-44c3-a270-3262357dd7e2 --- diff --git a/2to3-done.txt b/2to3-done.txt new file mode 100644 index 0000000..0d23e77 --- /dev/null +++ b/2to3-done.txt @@ -0,0 +1,156 @@ +CAN'T VERIFY + +./roundup/backends/back_mysql.py +./roundup/backends/back_tsearch2.py +./roundup/backends/indexer_xapian.py + +TODO + +./roundup/backends/rdbms_common.py +./roundup/backends/sessions_dbm.py +./roundup/backends/sessions_rdbms.py +./roundup/backends/tsearch2_setup.py +./roundup/cgi/__init__.py +./roundup/cgi/accept_language.py +./roundup/cgi/actions.py +./roundup/cgi/apache.py +./roundup/cgi/cgitb.py +./roundup/cgi/client.py +./roundup/cgi/exceptions.py +./roundup/cgi/form_parser.py +./roundup/cgi/MultiMapping.py +./roundup/cgi/PageTemplates/__init__.py +./roundup/cgi/PageTemplates/Expressions.py +./roundup/cgi/PageTemplates/GlobalTranslationService.py +./roundup/cgi/PageTemplates/MultiMapping.py +./roundup/cgi/PageTemplates/PageTemplate.py +./roundup/cgi/PageTemplates/PathIterator.py +./roundup/cgi/PageTemplates/PythonExpr.py +./roundup/cgi/PageTemplates/TALES.py +./roundup/cgi/TAL/__init__.py +./roundup/cgi/TAL/DummyEngine.py +./roundup/cgi/TAL/HTMLParser.py +./roundup/cgi/TAL/HTMLTALParser.py +./roundup/cgi/TAL/markupbase.py +./roundup/cgi/TAL/TALDefs.py +./roundup/cgi/TAL/TALGenerator.py +./roundup/cgi/TAL/talgettext.py +./roundup/cgi/TAL/TALInterpreter.py +./roundup/cgi/TAL/TALParser.py +./roundup/cgi/TAL/TranslationContext.py +./roundup/cgi/TAL/XMLParser.py +./roundup/cgi/templating.py +./roundup/cgi/TranslationService.py +./roundup/cgi/wsgi_handler.py +./roundup/cgi/zLOG.py +./roundup/cgi/ZTUtils/__init__.py +./roundup/cgi/ZTUtils/Batch.py +./roundup/cgi/ZTUtils/Iterator.py +./roundup/configuration.py +./roundup/date.py +./roundup/dist/__init__.py +./roundup/dist/command/__init__.py +./roundup/dist/command/bdist_rpm.py +./roundup/dist/command/build.py +./roundup/dist/command/build_doc.py +./roundup/dist/command/build_py.py +./roundup/dist/command/build_scripts.py +./roundup/exceptions.py +./roundup/hyperdb.py +./roundup/i18n.py +./roundup/init.py +./roundup/install_util.py +./roundup/instance.py +./roundup/mailer.py +./roundup/mailgw.py +./roundup/msgfmt.py +./roundup/password.py +./roundup/rfc2822.py +./roundup/roundupdb.py +./roundup/scripts/__init__.py +./roundup/scripts/roundup_admin.py +./roundup/scripts/roundup_demo.py +./roundup/scripts/roundup_gettext.py +./roundup/scripts/roundup_mailgw.py +./roundup/scripts/roundup_server.py +./roundup/scripts/roundup_xmlrpc_server.py +./roundup/security.py +./roundup/support.py +./roundup/token.py +./roundup/version_check.py +./roundup/xmlrpc.py +./run_tests.py +./scripts/copy-user.py +./scripts/imapServer.py +./scripts/import_sf.py +./scripts/schema_diagram.py +./setup.py +./share/roundup/templates/classic/detectors/messagesummary.py +./share/roundup/templates/classic/detectors/nosyreaction.py +./share/roundup/templates/classic/detectors/statusauditor.py +./share/roundup/templates/classic/detectors/userauditor.py +./share/roundup/templates/classic/initial_data.py +./share/roundup/templates/classic/schema.py 
+./share/roundup/templates/minimal/detectors/userauditor.py +./share/roundup/templates/minimal/initial_data.py +./share/roundup/templates/minimal/schema.py +./test/__init__.py +./test/benchmark.py +./test/db_test_base.py +./test/memorydb.py +./test/mocknull.py +./test/session_common.py +./test/test_actions.py +./test/test_anydbm.py +./test/test_anypy_hashlib.py +./test/test_cgi.py +./test/test_dates.py +./test/test_hyperdbvals.py +./test/test_indexer.py +./test/test_locking.py +./test/test_mailer.py +./test/test_mailgw.py +./test/test_mailsplit.py +./test/test_memorydb.py +./test/test_multipart.py +./test/test_mysql.py +./test/test_postgresql.py +./test/test_rfc2822.py +./test/test_schema.py +./test/test_security.py +./test/test_sqlite.py +./test/test_templating.py +./test/test_textfmt.py +./test/test_token.py +./test/test_tsearch2.py +./test/test_userauditor.py +./test/test_xmlrpc.py +./test.py +./tools/fixroles.py +./tools/load_tracker.py +./tools/migrate-queries.py +./tools/pygettext.py +./frontends/ZRoundup/__init__.py +./frontends/ZRoundup/ZRoundup.py + + + +DONE + +./doc/conf.py +./roundup/__init__.py +./roundup/admin.py +./roundup/actions.py +./roundup/anypy/__init__.py +./roundup/anypy/hashlib_.py +./roundup/anypy/sets_.py +./roundup/backends/__init__.py +./roundup/backends/back_anydbm.py +./roundup/backends/back_postgresql.py +./roundup/backends/back_sqlite.py +./roundup/backends/blobfiles.py +./roundup/backends/indexer_common.py +./roundup/backends/indexer_dbm.py +./roundup/backends/indexer_rdbms.py +./roundup/backends/locking.py +./roundup/backends/portalocker.py diff --git a/CHANGES.txt b/CHANGES.txt index 68d8b7b..3e8dfd9 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,7 +1,7 @@ This file contains the changes to the Roundup system over time. The entries are given with the most recent entry first. -2010-02-?? 1.4.13 +2010-02-19 1.4.13 Fixed: - Multilink edit fields lose their values (thanks Will Maier) diff --git a/doc/announcement.txt b/doc/announcement.txt index 51de8d8..70a6e96 100644 --- a/doc/announcement.txt +++ b/doc/announcement.txt @@ -1,39 +1,11 @@ -I'm proud to release version 1.4.12 of Roundup which fixes a number bugs. +I'm proud to release version 1.4.13 of Roundup which includes a single +fix for a regression introduced in 1.4.12: + +- Multilink edit fields lose their values (thanks Will Maier) If you're upgrading from an older version of Roundup you *must* follow the "Software Upgrade" guidelines given in the maintenance documentation. -This release includes: - -- Support IMAP CRAM-MD5, thanks Jochen Maes -- Proper handling of 'Create' permissions in both mail gateway (earlier - commit r4405 by Richard), web interface, and xmlrpc. This used to - check 'Edit' permission previously. See - http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5133 - Add regression tests for proper handling of 'Create' and 'Edit' - permissions. -- Fix handling of non-ascii in realname in the nosy mailer, this used to - mangle the email address making it unusable when replying. Thanks to - intevation for funding the fix. 
-- Fix documentation on user required to run the tests, fixes - issue2550618, thanks to Chris aka 'radioking' -- Add simple doc about translating customised tracker content -- Add "flup" setup documentation, thanks Christian Glass -- Fix "Web Access" permission check to allow serving of static files to - Anonymous again -- Add check for "Web Access" permission in all web templating permission - checks -- Improvements in upgrading documentation, thanks Christian Glass -- Display 'today' in the account user's timezone, thanks David Wolever -- Fix file handle leak in some web interfaces with logging turned on, - fixes issue1675845 -- Attempt to generate more human-readable addresses in email, fixes - issue2550632 -- Allow value to be specified to multilink form element templating, fixes - issue2550613, thanks David Wolever -- Fix thread safety with stdin in roundup-server, fixes issue2550596 - (thanks Werner Hunger) - Roundup requires python 2.3 or later (but not 3+) for correct operation. To give Roundup a try, just download (see below), unpack and run:: diff --git a/doc/conf.py b/doc/conf.py index 9837f89..3276c57 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -169,8 +169,8 @@ htmlhelp_basename = 'Roundupdoc' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). latex_documents = [ - ('docs/index', 'Roundup.tex', ur'Roundup Documentation', - ur'Richard Jones', 'manual'), + ('docs/index', 'Roundup.tex', u'Roundup Documentation', + u'Richard Jones', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of diff --git a/roundup/__init__.py b/roundup/__init__.py index 3ef1919..f646c11 100644 --- a/roundup/__init__.py +++ b/roundup/__init__.py @@ -68,6 +68,6 @@ much prettier cake :) ''' __docformat__ = 'restructuredtext' -__version__ = '1.4.12' +__version__ = '1.5.0' # vim: set filetype=python ts=4 sw=4 et si diff --git a/roundup/actions.py b/roundup/actions.py index ea1a15e..e3bbec5 100644 --- a/roundup/actions.py +++ b/roundup/actions.py @@ -49,8 +49,8 @@ class Retire(Action): # make sure we don't try to retire admin or anonymous if (classname == 'user' and self.db.user.get(itemid, 'username') in ('admin', 'anonymous')): - raise ValueError, self._( - 'You may not retire the admin or anonymous user') + raise ValueError(self._( + 'You may not retire the admin or anonymous user')) # do the retire self.db.getclass(classname).retire(itemid) diff --git a/roundup/admin.py b/roundup/admin.py index a8d75d1..1b583e3 100644 --- a/roundup/admin.py +++ b/roundup/admin.py @@ -21,7 +21,7 @@ """ __docformat__ = 'restructuredtext' -import csv, getopt, getpass, os, re, shutil, sys, UserDict +import csv, getopt, getpass, os, re, shutil, sys, UserDict, operator from roundup import date, hyperdb, roundupdb, init, password, token from roundup import __version__ as roundup_version @@ -37,16 +37,15 @@ class CommandDict(UserDict.UserDict): """ _marker = [] def get(self, key, default=_marker): - if self.data.has_key(key): + if key in self.data: return [(key, self.data[key])] - keylist = self.data.keys() - keylist.sort() + keylist = sorted(self.data) l = [] for ki in keylist: if ki.startswith(key): l.append((ki, self.data[ki])) if not l and default is self._marker: - raise KeyError, key + raise KeyError(key) return l class AdminTool: @@ -63,11 +62,11 @@ class AdminTool: """ def __init__(self): self.commands = CommandDict() - for k in AdminTool.__dict__.keys(): + for k in AdminTool.__dict__: if 
k[:3] == 'do_': self.commands[k[3:]] = getattr(self, k) self.help = {} - for k in AdminTool.__dict__.keys(): + for k in AdminTool.__dict__: if k[:5] == 'help_': self.help[k[5:]] = getattr(self, k) self.tracker_home = '' @@ -80,7 +79,7 @@ class AdminTool: try: return self.db.getclass(classname) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) def props_from_args(self, args): """ Produce a dictionary of prop: value from the args list. @@ -90,12 +89,12 @@ class AdminTool: props = {} for arg in args: if arg.find('=') == -1: - raise UsageError, _('argument "%(arg)s" not propname=value' - )%locals() + raise UsageError(_('argument "%(arg)s" not propname=value' + )%locals()) l = arg.split('=') if len(l) < 2: - raise UsageError, _('argument "%(arg)s" not propname=value' - )%locals() + raise UsageError(_('argument "%(arg)s" not propname=value' + )%locals()) key, value = l[0], '='.join(l[1:]) if value: props[key] = value @@ -137,7 +136,7 @@ Help: """ print _('Commands:'), commands = [''] - for command in self.commands.values(): + for command in self.commands.itervalues(): h = _(command.__doc__).split('\n')[0] commands.append(' '+h[7:]) commands.sort() @@ -150,10 +149,8 @@ matches only one command, e.g. l == li == lis == list.""")) def help_commands_html(self, indent_re=re.compile(r'^(\s+)\S+')): """ Produce an HTML command list. """ - commands = self.commands.values() - def sortfun(a, b): - return cmp(a.__name__, b.__name__) - commands.sort(sortfun) + commands = sorted(self.commands.itervalues(), + operator.attrgetter('__name__')) for command in commands: h = _(command.__doc__).split('\n') name = command.__name__[3:] @@ -255,7 +252,7 @@ Command help: # try help_ methods - if self.help.has_key(topic): + if topic in self.help: self.help[topic]() return 0 @@ -340,7 +337,7 @@ Command help: def help_initopts(self): templates = self.listTemplates() - print _('Templates:'), ', '.join(templates.keys()) + print _('Templates:'), ', '.join(templates) import roundup.backends backends = roundup.backends.list_backends() print _('Back ends:'), ', '.join(backends) @@ -369,19 +366,19 @@ Command help: See also initopts help. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) # make sure the tracker home can be created tracker_home = os.path.abspath(tracker_home) parent = os.path.split(tracker_home)[0] if not os.path.exists(parent): - raise UsageError, _('Instance home parent directory "%(parent)s"' - ' does not exist')%locals() + raise UsageError(_('Instance home parent directory "%(parent)s"' + ' does not exist')%locals()) config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE) # check for both old- and new-style configs - if filter(os.path.exists, [config_ini_file, - os.path.join(tracker_home, 'config.py')]): + if list(filter(os.path.exists, [config_ini_file, + os.path.join(tracker_home, 'config.py')])): ok = raw_input(_( """WARNING: There appears to be a tracker in "%(tracker_home)s"! If you re-install it, you will lose all the data! @@ -395,9 +392,9 @@ Erase it? 
Y/N: """) % locals()) # select template templates = self.listTemplates() template = len(args) > 1 and args[1] or '' - if not templates.has_key(template): - print _('Templates:'), ', '.join(templates.keys()) - while not templates.has_key(template): + if template not in templates: + print _('Templates:'), ', '.join(templates) + while template not in templates: template = raw_input(_('Select template [classic]: ')).strip() if not template: template = 'classic' @@ -439,8 +436,8 @@ Erase it? Y/N: """) % locals()) need_set = CoreConfig(tracker_home)._get_unset_options() if need_set: print _(" ... at a minimum, you must set following options:") - for section, options in need_set.items(): - print " [%s]: %s" % (section, ", ".join(options)) + for section in need_set: + print " [%s]: %s" % (section, ", ".join(need_set[section])) # note about schema modifications print _(""" @@ -466,7 +463,7 @@ Erase it? Y/N: """) % locals()) in . """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) config = CoreConfig() config.save(args[0]) @@ -490,11 +487,11 @@ Erase it? Y/N: """) % locals()) # make sure the tracker home is installed if not os.path.exists(tracker_home): - raise UsageError, _('Instance home does not exist')%locals() + raise UsageError(_('Instance home does not exist')%locals()) try: tracker = roundup.instance.open(tracker_home) except roundup.instance.TrackerError: - raise UsageError, _('Instance has not been installed')%locals() + raise UsageError(_('Instance has not been installed')%locals()) # is there already a database? if tracker.exists(): @@ -530,7 +527,7 @@ Erase it? Y/N: """)) by the designators. """ if len(args) < 2: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) propname = args[0] designators = args[1].split(',') l = [] @@ -539,7 +536,7 @@ Erase it? Y/N: """)) try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the class cl = self.get_class(classname) @@ -563,7 +560,9 @@ Erase it? Y/N: """)) property = properties[propname] if not (isinstance(property, hyperdb.Multilink) or isinstance(property, hyperdb.Link)): - raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname + raise UsageError(_('property %s is not of type' + ' Multilink or Link so -d flag does not ' + 'apply.')%propname) propclassname = self.db.getclass(property.classname).classname id = cl.get(nodeid, propname) for i in id: @@ -578,7 +577,9 @@ Erase it? Y/N: """)) property = properties[propname] if not (isinstance(property, hyperdb.Multilink) or isinstance(property, hyperdb.Link)): - raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname + raise UsageError(_('property %s is not of type' + ' Multilink or Link so -d flag does not ' + 'apply.')%propname) propclassname = self.db.getclass(property.classname).classname id = cl.get(nodeid, propname) for i in id: @@ -586,10 +587,11 @@ Erase it? 
Y/N: """)) else: print cl.get(nodeid, propname) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) except KeyError: - raise UsageError, _('no such %(classname)s property ' - '"%(propname)s"')%locals() + raise UsageError(_('no such %(classname)s property ' + '"%(propname)s"')%locals()) if self.separator: print self.separator.join(l) @@ -612,7 +614,7 @@ Erase it? Y/N: """)) ids for the multilink as comma-separated numbers (ie "1,2,3"). """ if len(args) < 2: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb designators = args[0].split(',') @@ -628,7 +630,7 @@ Erase it? Y/N: """)) try: designators = [hyperdb.splitDesignator(x) for x in designators] except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the props from the args props = self.props_from_args(args[1:]) @@ -643,14 +645,14 @@ Erase it? Y/N: """)) props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid, key, value) except hyperdb.HyperdbValueError, message: - raise UsageError, message + raise UsageError(message) # try the set try: - apply(cl.set, (itemid, ), props) + cl.set(itemid, **props) except (TypeError, IndexError, ValueError), message: import traceback; traceback.print_exc() - raise UsageError, message + raise UsageError(message) self.db_uncommitted = True return 0 @@ -663,7 +665,7 @@ Erase it? Y/N: """)) value. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class cl = self.get_class(classname) @@ -672,7 +674,7 @@ Erase it? Y/N: """)) props = self.props_from_args(args[1:]) # convert the user-input value to a value used for find() - for propname, value in props.items(): + for propname, value in props.iteritems(): if ',' in value: values = value.split(',') else: @@ -692,26 +694,26 @@ Erase it? Y/N: """)) designator = [] if self.separator: if self.print_designator: - id=apply(cl.find, (), props) + id = cl.find(**props) for i in id: designator.append(classname + i) print self.separator.join(designator) else: - print self.separator.join(apply(cl.find, (), props)) + print self.separator.join(cl.find(**props)) else: if self.print_designator: - id=apply(cl.find, (), props) + id = cl.find(**props) for i in id: designator.append(classname + i) print designator else: - print apply(cl.find, (), props) + print cl.find(**props) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) except (ValueError, TypeError), message: - raise UsageError, message + raise UsageError(message) return 0 def do_specification(self, args): @@ -721,14 +723,15 @@ Erase it? Y/N: """)) This lists the properties for a given class. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class cl = self.get_class(classname) # get the key property keyprop = cl.getkey() - for key, value in cl.properties.items(): + for key in cl.properties: + value = cl.properties[key] if keyprop == key: print _('%(key)s: %(value)s (key property)')%locals() else: @@ -745,21 +748,20 @@ Erase it? Y/N: """)) node. 
""" if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) # decode the node designator for designator in args[0].split(','): try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) # get the class cl = self.get_class(classname) # display the values - keys = cl.properties.keys() - keys.sort() + keys = sorted(cl.properties) for key in keys: value = cl.get(nodeid, key) print _('%(key)s: %(value)s')%locals() @@ -773,7 +775,7 @@ Erase it? Y/N: """)) command. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb classname = args[0] @@ -786,8 +788,9 @@ Erase it? Y/N: """)) properties = cl.getprops(protected = 0) if len(args) == 1: # ask for the properties - for key, value in properties.items(): + for key in properties: if key == 'id': continue + value = properties[key] name = value.__class__.__name__ if isinstance(value , hyperdb.Password): again = None @@ -808,24 +811,24 @@ Erase it? Y/N: """)) props = self.props_from_args(args[1:]) # convert types - for propname, value in props.items(): + for propname in props: try: props[propname] = hyperdb.rawToHyperdb(self.db, cl, None, - propname, value) + propname, props[propname]) except hyperdb.HyperdbValueError, message: - raise UsageError, message + raise UsageError(message) # check for the key property propname = cl.getkey() - if propname and not props.has_key(propname): - raise UsageError, _('you must provide the "%(propname)s" ' - 'property.')%locals() + if propname and propname not in props: + raise UsageError(_('you must provide the "%(propname)s" ' + 'property.')%locals()) # do the actual create try: - print apply(cl.create, (), props) + print cl.create(**props) except (TypeError, IndexError, ValueError), message: - raise UsageError, message + raise UsageError(message) self.db_uncommitted = True return 0 @@ -843,9 +846,9 @@ Erase it? Y/N: """)) for every class instance. """ if len(args) > 2: - raise UsageError, _('Too many arguments supplied') + raise UsageError(_('Too many arguments supplied')) if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class @@ -865,8 +868,8 @@ Erase it? Y/N: """)) try: proplist.append(cl.get(nodeid, propname)) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) print self.separator.join(proplist) else: # create a list of index id's since user didn't specify @@ -877,8 +880,8 @@ Erase it? Y/N: """)) try: value = cl.get(nodeid, propname) except KeyError: - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) print _('%(nodeid)4s: %(value)s')%locals() return 0 @@ -912,7 +915,7 @@ Erase it? Y/N: """)) will result in a the 4 character wide "Name" column. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) classname = args[0] # get the class @@ -927,14 +930,15 @@ Erase it? 
Y/N: """)) try: propname, width = spec.split(':') except (ValueError, TypeError): - raise UsageError, _('"%(spec)s" not name:width')%locals() + raise UsageError(_('"%(spec)s" not ' + 'name:width')%locals()) else: propname = spec - if not all_props.has_key(propname): - raise UsageError, _('%(classname)s has no property ' - '"%(propname)s"')%locals() + if propname not in all_props: + raise UsageError(_('%(classname)s has no property ' + '"%(propname)s"')%locals()) else: - prop_names = cl.getprops().keys() + prop_names = cl.getprops() # now figure column widths props = [] @@ -986,18 +990,19 @@ Erase it? Y/N: """)) Lists the journal entries for the node identified by the designator. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) try: classname, nodeid = hyperdb.splitDesignator(args[0]) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: print self.db.getclass(classname).history(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) return 0 def do_commit(self, args): @@ -1039,19 +1044,20 @@ Erase it? Y/N: """)) by the list or find commands, and its key value may be re-used. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) designators = args[0].split(',') for designator in designators: try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: self.db.getclass(classname).retire(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) self.db_uncommitted = True return 0 @@ -1065,19 +1071,20 @@ Erase it? Y/N: """)) The given nodes will become available for users again. """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) designators = args[0].split(',') for designator in designators: try: classname, nodeid = hyperdb.splitDesignator(designator) except hyperdb.DesignatorError, message: - raise UsageError, message + raise UsageError(message) try: self.db.getclass(classname).restore(nodeid) except KeyError: - raise UsageError, _('no such class "%(classname)s"')%locals() + raise UsageError(_('no such class "%(classname)s"')%locals()) except IndexError: - raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals() + raise UsageError(_('no such %(classname)s node ' + '"%(nodeid)s"')%locals()) self.db_uncommitted = True return 0 @@ -1096,19 +1103,19 @@ Erase it? 
Y/N: """)) """ # grab the directory to export to if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) dir = args[-1] # get the list of classes to export if len(args) == 2: if args[0].startswith('-'): - classes = [ c for c in self.db.classes.keys() + classes = [ c for c in self.db.classes if not c in args[0][1:].split(',') ] else: classes = args[0].split(',') else: - classes = self.db.classes.keys() + classes = self.db.classes class colon_separated(csv.excel): delimiter = ':' @@ -1166,7 +1173,8 @@ Erase it? Y/N: """)) sys.stdout.write("\nExporting Journal for %s\n" % classname) sys.stdout.flush() journals = csv.writer(jf, colon_separated) - map(journals.writerow, cl.export_journals()) + for row in cl.export_journals(): + journals.writerow(row) jf.close() if max_len > self.db.config.CSV_FIELD_SIZE: print >> sys.stderr, \ @@ -1209,7 +1217,7 @@ Erase it? Y/N: """)) database (or, tediously, retire all the old data.) """ if len(args) < 1: - raise UsageError, _('Not enough arguments supplied') + raise UsageError(_('Not enough arguments supplied')) from roundup import hyperdb if hasattr (csv, 'field_size_limit'): @@ -1250,7 +1258,10 @@ Erase it? Y/N: """)) if hasattr(cl, 'import_files'): cl.import_files(dir, nodeid) maxid = max(maxid, int(nodeid)) + + # (print to sys.stdout here to allow tests to squash it .. ugh) print >> sys.stdout + f.close() # import the journals @@ -1259,8 +1270,10 @@ Erase it? Y/N: """)) cl.import_journals(reader) f.close() - # set the id counter + # (print to sys.stdout here to allow tests to squash it .. ugh) print >> sys.stdout, 'setting', classname, maxid+1 + + # set the id counter self.db.setid(classname, str(maxid+1)) self.db_uncommitted = True @@ -1284,8 +1297,8 @@ Erase it? Y/N: """)) 2001-01-01 """ - if len(args) <> 1: - raise UsageError, _('Not enough arguments supplied') + if len(args) != 1: + raise UsageError(_('Not enough arguments supplied')) # are we dealing with a period or a date value = args[0] @@ -1295,7 +1308,7 @@ Erase it? Y/N: """)) """, re.VERBOSE) m = date_re.match(value) if not m: - raise ValueError, _('Invalid format') + raise ValueError(_('Invalid format')) m = m.groupdict() if m['period']: pack_before = date.Date(". - %s"%value) @@ -1320,8 +1333,8 @@ Erase it? Y/N: """)) try: cl.index(m.group(2)) except IndexError: - raise UsageError, _('no such item "%(designator)s"')%{ - 'designator': arg} + raise UsageError(_('no such item "%(designator)s"')%{ + 'designator': arg}) else: cl = self.get_class(arg) self.db.reindex(arg) @@ -1341,7 +1354,7 @@ Erase it? Y/N: """)) print _('No such Role "%(role)s"')%locals() return 1 else: - roles = self.db.security.role.items() + roles = list(self.db.security.role.items()) role = self.db.config.NEW_WEB_USER_ROLES if ',' in role: print _('New Web users get the Roles "%(role)s"')%locals() @@ -1517,7 +1530,7 @@ Erase it? 
Y/N: """)) self.tracker_home = os.environ.get('TRACKER_HOME', '') # TODO: reinstate the user/password stuff (-u arg too) name = password = '' - if os.environ.has_key('ROUNDUP_LOGIN'): + if 'ROUNDUP_LOGIN' in os.environ: l = os.environ['ROUNDUP_LOGIN'].split(':') name = l[0] if len(l) > 1: diff --git a/roundup/backends/__init__.py b/roundup/backends/__init__.py index 8735151..1fc7948 100644 --- a/roundup/backends/__init__.py +++ b/roundup/backends/__init__.py @@ -38,7 +38,7 @@ def get_backend(name): '''Get a specific backend by name.''' vars = globals() # if requested backend has been imported yet, return current instance - if vars.has_key(name): + if name in vars: return vars[name] # import the backend module module_name = 'back_%s' % name diff --git a/roundup/backends/back_anydbm.py b/roundup/backends/back_anydbm.py index cfda757..3962a63 100644 --- a/roundup/backends/back_anydbm.py +++ b/roundup/backends/back_anydbm.py @@ -22,30 +22,22 @@ serious bugs, and is not available) """ __docformat__ = 'restructuredtext' -try: - import anydbm, sys - # dumbdbm only works in python 2.1.2+ - if sys.version_info < (2,1,2): - import dumbdbm - assert anydbm._defaultmod != dumbdbm - del dumbdbm -except AssertionError: - print "WARNING: you should upgrade to python 2.1.3" +import os, marshal, re, weakref, string, copy, time, shutil, logging -import whichdb, os, marshal, re, weakref, string, copy, time, shutil, logging +from roundup.anypy.dbm_ import anydbm, whichdb from roundup import hyperdb, date, password, roundupdb, security, support from roundup.support import reversed from roundup.backends import locking from roundup.i18n import _ -from blobfiles import FileStorage -from sessions_dbm import Sessions, OneTimeKeys +from roundup.backends.blobfiles import FileStorage +from roundup.backends.sessions_dbm import Sessions, OneTimeKeys try: - from indexer_xapian import Indexer + from roundup.backends.indexer_xapian import Indexer except ImportError: - from indexer_dbm import Indexer + from roundup.backends.indexer_dbm import Indexer def db_exists(config): # check for the user db @@ -146,13 +138,13 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # def __getattr__(self, classname): """A convenient way of calling self.getclass(classname).""" - if self.classes.has_key(classname): + if classname in self.classes: return self.classes[classname] raise AttributeError, classname def addclass(self, cl): cn = cl.classname - if self.classes.has_key(cn): + if cn in self.classes: raise ValueError, cn self.classes[cn] = cl @@ -166,9 +158,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def getclasses(self): """Return a list of the names of all existing classes.""" - l = self.classes.keys() - l.sort() - return l + return sorted(self.classes) def getclass(self, classname): """Get the Class object representing a particular class. 
@@ -178,7 +168,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): try: return self.classes[classname] except KeyError: - raise KeyError, 'There is no class called "%s"'%classname + raise KeyError('There is no class called "%s"'%classname) # # Class DBs @@ -187,7 +177,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """Delete all database contents """ logging.getLogger('hyperdb').info('clear') - for cn in self.classes.keys(): + for cn in self.classes: for dummy in 'nodes', 'journals': path = os.path.join(self.dir, 'journals.%s'%cn) if os.path.exists(path): @@ -212,10 +202,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ db_type = '' if os.path.exists(path): - db_type = whichdb.whichdb(path) + db_type = whichdb(path) if not db_type: - raise hyperdb.DatabaseError, \ - _("Couldn't identify database type") + raise hyperdb.DatabaseError(_("Couldn't identify database type")) elif os.path.exists(path+'.db'): # if the path ends in '.db', it's a dbm database, whether # anydbm says it's dbhash or not! @@ -231,21 +220,24 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): db_type = self.determine_db_type(path) # new database? let anydbm pick the best dbm - if not db_type: + # in Python 3+ the "dbm" ("anydbm" to us) module already uses the + # whichdb() function to do this + if not db_type or hasattr(anydbm, 'whichdb'): if __debug__: - logging.getLogger('hyperdb').debug("opendb anydbm.open(%r, 'c')"%path) + logging.getLogger('hyperdb').debug( + "opendb anydbm.open(%r, 'c')"%path) return anydbm.open(path, 'c') - # open the database with the correct module + # in Python <3 it anydbm was a little dumb so manually open the + # database with the correct module try: dbm = __import__(db_type) except ImportError: - raise hyperdb.DatabaseError, \ - _("Couldn't open database - the required module '%s'"\ - " is not available")%db_type + raise hyperdb.DatabaseError(_("Couldn't open database - the " + "required module '%s' is not available")%db_type) if __debug__: - logging.getLogger('hyperdb').debug("opendb %r.open(%r, %r)"%(db_type, path, - mode)) + logging.getLogger('hyperdb').debug( + "opendb %r.open(%r, %r)"%(db_type, path, mode)) return dbm.open(path, mode) # @@ -256,7 +248,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ # open the ids DB - create if if doesn't exist db = self.opendb('_ids', 'c') - if db.has_key(classname): + if classname in db: newid = db[classname] = str(int(db[classname]) + 1) else: # the count() bit is transitional - older dbs won't start at 1 @@ -280,7 +272,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ add the specified node to its class's db """ # we'll be supplied these props if we're doing an import - if not node.has_key('creator'): + if 'creator' not in node: # add in the "calculated" properties (dupe so we don't affect # calling code's node assumptions) node = node.copy() @@ -316,7 +308,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ # try the cache cache_dict = self.cache.setdefault(classname, {}) - if cache_dict.has_key(nodeid): + if nodeid in cache_dict: if __debug__: logging.getLogger('hyperdb').debug('get %s%s cached'%(classname, nodeid)) self.stats['cache_hits'] += 1 @@ -330,13 +322,13 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): # get from the database and save in the cache if db is None: db = self.getclassdb(classname) - if not db.has_key(nodeid): - raise IndexError, "no such %s 
%s"%(classname, nodeid) + if nodeid not in db: + raise IndexError("no such %s %s"%(classname, nodeid)) # check the uncommitted, destroyed nodes - if (self.destroyednodes.has_key(classname) and - self.destroyednodes[classname].has_key(nodeid)): - raise IndexError, "no such %s %s"%(classname, nodeid) + if (classname in self.destroyednodes and + nodeid in self.destroyednodes[classname]): + raise IndexError("no such %s %s"%(classname, nodeid)) # decode res = marshal.loads(db[nodeid]) @@ -360,11 +352,9 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): logging.getLogger('hyperdb').info('destroy %s%s'%(classname, nodeid)) # remove from cache and newnodes if it's there - if (self.cache.has_key(classname) and - self.cache[classname].has_key(nodeid)): + if (classname in self.cache and nodeid in self.cache[classname]): del self.cache[classname][nodeid] - if (self.newnodes.has_key(classname) and - self.newnodes[classname].has_key(nodeid)): + if (classname in self.newnodes and nodeid in self.newnodes[classname]): del self.newnodes[classname][nodeid] # see if there's any obvious commit actions that we should get rid of @@ -385,13 +375,13 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ properties = self.getclass(classname).getprops() d = {} - for k, v in node.items(): + for k, v in node.iteritems(): if k == self.RETIRED_FLAG: d[k] = v continue # if the property doesn't exist then we really don't care - if not properties.has_key(k): + if k not in properties: continue # get the property spec @@ -412,10 +402,10 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ properties = self.getclass(classname).getprops() d = {} - for k, v in node.items(): + for k, v in node.iteritems(): # if the property doesn't exist, or is the "retired" flag then # it won't be in the properties dict - if not properties.has_key(k): + if k not in properties: d[k] = v continue @@ -439,29 +429,27 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ # try the cache cache = self.cache.setdefault(classname, {}) - if cache.has_key(nodeid): + if nodeid in cache: return 1 # not in the cache - check the database if db is None: db = self.getclassdb(classname) - res = db.has_key(nodeid) - return res + return nodeid in db def countnodes(self, classname, db=None): count = 0 # include the uncommitted nodes - if self.newnodes.has_key(classname): + if classname in self.newnodes: count += len(self.newnodes[classname]) - if self.destroyednodes.has_key(classname): + if classname in self.destroyednodes: count -= len(self.destroyednodes[classname]) # and count those in the DB if db is None: db = self.getclassdb(classname) - count = count + len(db.keys()) - return count + return count + len(db) # @@ -529,14 +517,14 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): db = self.opendb('journals.%s'%classname, 'r') except anydbm.error, error: if str(error) == "need 'c' or 'n' flag to open new db": - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) elif error.args[0] != 2: # this isn't a "not found" error, be alarmed! 
raise if res: # we have unsaved journal entries, return them return res - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) try: journal = marshal.loads(db[nodeid]) except KeyError: @@ -544,7 +532,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): if res: # we have some unsaved journal entries, be happy! return res - raise IndexError, 'no such %s %s'%(classname, nodeid) + raise IndexError('no such %s %s'%(classname, nodeid)) db.close() # add all the saved journal entries for this node @@ -564,7 +552,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): db_type = self.determine_db_type(path) db = self.opendb(db_name, 'w') - for key in db.keys(): + for key in db: # get the journal for this db entry journal = marshal.loads(db[key]) l = [] @@ -617,7 +605,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): reindex[method(*args)] = 1 finally: # make sure we close all the database files - for db in self.databases.values(): + for db in self.databases.itervalues(): db.close() del self.databases @@ -627,7 +615,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): self.transactions = [] # reindex the nodes that request it - for classname, nodeid in filter(None, reindex.keys()): + for classname, nodeid in [k for k in reindex if k]: self.getclass(classname).index(nodeid) # save the indexer state @@ -648,7 +636,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ # get the database handle db_name = 'nodes.%s'%classname - if not self.databases.has_key(db_name): + if db_name not in self.databases: self.databases[db_name] = self.getclassdb(classname, 'c') return self.databases[db_name] @@ -666,7 +654,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): """ # get the database handle db_name = 'journals.%s'%classname - if not self.databases.has_key(db_name): + if db_name not in self.databases: self.databases[db_name] = self.opendb(db_name, 'c') return self.databases[db_name] @@ -691,7 +679,7 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): db = self.getCachedJournalDB(classname) # now insert the journal entry - if db.has_key(nodeid): + if nodeid in db: # append to existing s = db[nodeid] l = marshal.loads(s) @@ -716,12 +704,12 @@ class Database(FileStorage, hyperdb.Database, roundupdb.Database): def doDestroyNode(self, classname, nodeid): # delete from the class database db = self.getCachedClassDB(classname) - if db.has_key(nodeid): + if nodeid in db: del db[nodeid] # delete from the database db = self.getCachedJournalDB(classname) - if db.has_key(nodeid): + if nodeid in db: del db[nodeid] def rollback(self): @@ -785,7 +773,7 @@ class Class(hyperdb.Class): to modify the "creation" or "activity" properties cause a KeyError. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.fireAuditors('create', None, propvalues) newid = self.create_inner(**propvalues) self.fireReactors('create', newid, None) @@ -794,48 +782,49 @@ class Class(hyperdb.Class): def create_inner(self, **propvalues): """ Called by create, in-between the audit and react calls. 
""" - if propvalues.has_key('id'): - raise KeyError, '"id" is reserved' + if 'id' in propvalues: + raise KeyError('"id" is reserved') if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) - if propvalues.has_key('creation') or propvalues.has_key('activity'): - raise KeyError, '"creation" and "activity" are reserved' + if 'creation' in propvalues or 'activity' in propvalues: + raise KeyError('"creation" and "activity" are reserved') # new node's id newid = self.db.newid(self.classname) # validate propvalues num_re = re.compile('^\d+$') - for key, value in propvalues.items(): + for key, value in propvalues.iteritems(): if key == self.key: try: self.lookup(value) except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # try to handle this property try: prop = self.properties[key] except KeyError: - raise KeyError, '"%s" has no property "%s"'%(self.classname, - key) + raise KeyError('"%s" has no property "%s"'%(self.classname, + key)) if value is not None and isinstance(prop, hyperdb.Link): if type(value) != type(''): - raise ValueError, 'link value must be String' + raise ValueError('link value must be String') link_class = self.properties[key].classname # if it isn't a number, it's a key if not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, value, link_class) + raise IndexError('new property "%s": %s not a %s'%( + key, value, link_class)) elif not self.db.getclass(link_class).hasnode(value): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) # save off the value propvalues[key] = value @@ -849,22 +838,22 @@ class Class(hyperdb.Class): if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property "%s" not an iterable of ids'%key + raise TypeError('new property "%s" not an iterable of ids'%key) # clean up and validate the list of links link_class = self.properties[key].classname l = [] for entry in value: if type(entry) != type(''): - raise ValueError, '"%s" multilink value (%r) '\ - 'must contain Strings'%(key, value) + raise ValueError('"%s" multilink value (%r) '\ + 'must contain Strings'%(key, value)) # if it isn't a number, it's a key if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - key, entry, self.properties[key].classname) + raise IndexError('new property "%s": %s not a %s'%( + key, entry, self.properties[key].classname)) l.append(entry) value = l propvalues[key] = value @@ -872,8 +861,8 @@ class Class(hyperdb.Class): # handle additions for nodeid in value: if not self.db.getclass(link_class).hasnode(nodeid): - raise IndexError, '%s has no node %s'%(link_class, - nodeid) + raise IndexError('%s has no node %s'%(link_class, + nodeid)) # register the link with the newly linked node if self.do_journal and self.properties[key].do_journal: self.db.addjournal(link_class, nodeid, 'link', @@ -881,41 +870,41 @@ class Class(hyperdb.Class): elif isinstance(prop, hyperdb.String): if type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%key + raise TypeError('new property "%s" not a string'%key) if prop.indexme: 
self.db.indexer.add_text((self.classname, newid, key), value) elif isinstance(prop, hyperdb.Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%key + raise TypeError('new property "%s" not a Password'%key) elif isinstance(prop, hyperdb.Date): if value is not None and not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'%key + raise TypeError('new property "%s" not a Date'%key) elif isinstance(prop, hyperdb.Interval): if value is not None and not isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an Interval'%key + raise TypeError('new property "%s" not an Interval'%key) elif value is not None and isinstance(prop, hyperdb.Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%key + raise TypeError('new property "%s" not numeric'%key) elif value is not None and isinstance(prop, hyperdb.Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%key + raise TypeError('new property "%s" not boolean'%key) # make sure there's data where there needs to be - for key, prop in self.properties.items(): - if propvalues.has_key(key): + for key, prop in self.properties.iteritems(): + if key in propvalues: continue if key == self.key: - raise ValueError, 'key property "%s" is required'%key + raise ValueError('key property "%s" is required'%key) if isinstance(prop, hyperdb.Multilink): propvalues[key] = [] @@ -946,10 +935,10 @@ class Class(hyperdb.Class): # check for one of the special props if propname == 'creation': - if d.has_key('creation'): + if 'creation' in d: return d['creation'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: return journal[0][1] @@ -957,10 +946,10 @@ class Class(hyperdb.Class): # on the strange chance that there's no journal return date.Date() if propname == 'activity': - if d.has_key('activity'): + if 'activity' in d: return d['activity'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: return self.db.getjournal(self.classname, nodeid)[-1][1] @@ -968,10 +957,10 @@ class Class(hyperdb.Class): # on the strange chance that there's no journal return date.Date() if propname == 'creator': - if d.has_key('creator'): + if 'creator' in d: return d['creator'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: num_re = re.compile('^\d+$') @@ -988,10 +977,10 @@ class Class(hyperdb.Class): else: return self.db.getuid() if propname == 'actor': - if d.has_key('actor'): + if 'actor' in d: return d['actor'] if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') journal = self.db.getjournal(self.classname, nodeid) if journal: num_re = re.compile('^\d+$') @@ -1011,7 +1000,7 @@ class Class(hyperdb.Class): # get the property (raises KeyErorr if invalid) prop = self.properties[propname] - if not d.has_key(propname): + if propname not in d: if default is _marker: if isinstance(prop, hyperdb.Multilink): return [] @@ -1048,12 +1037,12 @@ class 
Class(hyperdb.Class): to modify the "creation" or "activity" properties cause a KeyError. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.fireAuditors('set', nodeid, propvalues) oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid)) - for name,prop in self.getprops(protected=0).items(): - if oldvalues.has_key(name): + for name, prop in self.getprops(protected=0).iteritems(): + if name in oldvalues: continue if isinstance(prop, hyperdb.Multilink): oldvalues[name] = [] @@ -1069,24 +1058,25 @@ class Class(hyperdb.Class): if not propvalues: return propvalues - if propvalues.has_key('creation') or propvalues.has_key('activity'): + if 'creation' in propvalues or 'activity' in propvalues: raise KeyError, '"creation" and "activity" are reserved' - if propvalues.has_key('id'): + if 'id' in propvalues: raise KeyError, '"id" is reserved' if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: raise IndexError num_re = re.compile('^\d+$') # if the journal value is to be different, store it in here journalvalues = {} - for propname, value in propvalues.items(): + # list() propvalues 'cos it might be modified by the loop + for propname, value in list(propvalues.items()): # check to make sure we're not duplicating an existing key if propname == self.key and node[propname] != value: try: @@ -1094,7 +1084,7 @@ class Class(hyperdb.Class): except KeyError: pass else: - raise ValueError, 'node with key "%s" exists'%value + raise ValueError('node with key "%s" exists'%value) # this will raise the KeyError if the property isn't valid # ... 
we don't use getprops() here because we only care about @@ -1102,8 +1092,8 @@ class Class(hyperdb.Class): try: prop = self.properties[propname] except KeyError: - raise KeyError, '"%s" has no property named "%s"'%( - self.classname, propname) + raise KeyError('"%s" has no property named "%s"'%( + self.classname, propname)) # if the value's the same as the existing value, no sense in # doing anything @@ -1118,22 +1108,23 @@ class Class(hyperdb.Class): link_class = prop.classname # if it isn't a number, it's a key if value is not None and not isinstance(value, type('')): - raise ValueError, 'property "%s" link value be a string'%( - propname) + raise ValueError('property "%s" link value be a string'%( + propname)) if isinstance(value, type('')) and not num_re.match(value): try: value = self.db.classes[link_class].lookup(value) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( - propname, value, prop.classname) + raise IndexError('new property "%s": %s not a %s'%( + propname, value, prop.classname)) if (value is not None and not self.db.getclass(link_class).hasnode(value)): - raise IndexError, '%s has no node %s'%(link_class, value) + raise IndexError('%s has no node %s'%(link_class, + value)) if self.do_journal and prop.do_journal: # register the unlink with the old linked node - if node.has_key(propname) and node[propname] is not None: + if propname in node and node[propname] is not None: self.db.addjournal(link_class, node[propname], 'unlink', (self.classname, nodeid, propname)) @@ -1146,22 +1137,22 @@ class Class(hyperdb.Class): if value is None: value = [] if not hasattr(value, '__iter__'): - raise TypeError, 'new property "%s" not an iterable of'\ - ' ids'%propname + raise TypeError('new property "%s" not an iterable of' + ' ids'%propname) link_class = self.properties[propname].classname l = [] for entry in value: # if it isn't a number, it's a key if type(entry) != type(''): - raise ValueError, 'new property "%s" link value ' \ - 'must be a string'%propname + raise ValueError('new property "%s" link value ' + 'must be a string'%propname) if not num_re.match(entry): try: entry = self.db.classes[link_class].lookup(entry) except (TypeError, KeyError): - raise IndexError, 'new property "%s": %s not a %s'%( + raise IndexError('new property "%s": %s not a %s'%( propname, entry, - self.properties[propname].classname) + self.properties[propname].classname)) l.append(entry) value = l propvalues[propname] = value @@ -1171,7 +1162,7 @@ class Class(hyperdb.Class): remove = [] # handle removals - if node.has_key(propname): + if propname in node: l = node[propname] else: l = [] @@ -1188,7 +1179,8 @@ class Class(hyperdb.Class): # handle additions for id in value: if not self.db.getclass(link_class).hasnode(id): - raise IndexError, '%s has no node %s'%(link_class, id) + raise IndexError('%s has no node %s'%(link_class, + id)) if id in l: continue # register the link with the newly linked node @@ -1209,38 +1201,43 @@ class Class(hyperdb.Class): elif isinstance(prop, hyperdb.String): if value is not None and type(value) != type('') and type(value) != type(u''): - raise TypeError, 'new property "%s" not a string'%propname + raise TypeError('new property "%s" not a ' + 'string'%propname) if prop.indexme: self.db.indexer.add_text((self.classname, nodeid, propname), value) elif isinstance(prop, hyperdb.Password): if not isinstance(value, password.Password): - raise TypeError, 'new property "%s" not a Password'%propname + raise TypeError('new property "%s" not a ' + 
'Password'%propname) propvalues[propname] = value elif value is not None and isinstance(prop, hyperdb.Date): if not isinstance(value, date.Date): - raise TypeError, 'new property "%s" not a Date'% propname + raise TypeError('new property "%s" not a ' + 'Date'%propname) propvalues[propname] = value elif value is not None and isinstance(prop, hyperdb.Interval): if not isinstance(value, date.Interval): - raise TypeError, 'new property "%s" not an '\ - 'Interval'%propname + raise TypeError('new property "%s" not an ' + 'Interval'%propname) propvalues[propname] = value elif value is not None and isinstance(prop, hyperdb.Number): try: float(value) except ValueError: - raise TypeError, 'new property "%s" not numeric'%propname + raise TypeError('new property "%s" not ' + 'numeric'%propname) elif value is not None and isinstance(prop, hyperdb.Boolean): try: int(value) except ValueError: - raise TypeError, 'new property "%s" not boolean'%propname + raise TypeError('new property "%s" not ' + 'boolean'%propname) node[propname] = value @@ -1273,7 +1270,7 @@ class Class(hyperdb.Class): to modify the "creation" or "activity" properties cause a KeyError. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.fireAuditors('retire', nodeid, None) @@ -1291,7 +1288,7 @@ class Class(hyperdb.Class): Make node available for all operations like it was before retirement. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) node = self.db.getnode(self.classname, nodeid) # check if key property was overrided @@ -1301,8 +1298,8 @@ class Class(hyperdb.Class): except KeyError: pass else: - raise KeyError, "Key property (%s) of retired node clashes with \ - existing one (%s)" % (key, node[key]) + raise KeyError("Key property (%s) of retired node clashes " + "with existing one (%s)" % (key, node[key])) # Now we can safely restore node self.fireAuditors('restore', nodeid, None) del node[self.db.RETIRED_FLAG] @@ -1316,7 +1313,7 @@ class Class(hyperdb.Class): """Return true if the node is retired. """ node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: return 1 return 0 @@ -1337,7 +1334,7 @@ class Class(hyperdb.Class): support the session storage of the cgi interface. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) self.db.destroynode(self.classname, nodeid) def history(self, nodeid): @@ -1354,7 +1351,7 @@ class Class(hyperdb.Class): 'tag' is the journaltag specified when the database was opened. """ if not self.do_journal: - raise ValueError, 'Journalling is disabled for this class' + raise ValueError('Journalling is disabled for this class') return self.db.getjournal(self.classname, nodeid) # Locating nodes: @@ -1373,7 +1370,7 @@ class Class(hyperdb.Class): """ prop = self.getprops()[propname] if not isinstance(prop, hyperdb.String): - raise TypeError, 'key properties must be String' + raise TypeError('key properties must be String') self.key = propname def getkey(self): @@ -1390,21 +1387,22 @@ class Class(hyperdb.Class): otherwise a KeyError is raised. 
""" if not self.key: - raise TypeError, 'No key property set for class %s'%self.classname + raise TypeError('No key property set for ' + 'class %s'%self.classname) cldb = self.db.getclassdb(self.classname) try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue - if not node.has_key(self.key): + if self.key not in node: continue if node[self.key] == keyvalue: return nodeid finally: cldb.close() - raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key, - keyvalue, self.classname) + raise KeyError('No key (%s) value "%s" for "%s"'%(self.key, + keyvalue, self.classname)) # change from spec - allows multiple props to match def find(self, **propspec): @@ -1422,12 +1420,12 @@ class Class(hyperdb.Class): db.issue.find(messages='1') db.issue.find(messages={'1':1,'3':1}, files={'7':1}) """ - propspec = propspec.items() - for propname, itemids in propspec: + for propname, itemids in propspec.iteritems(): # check the prop is OK prop = self.properties[propname] if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink): - raise TypeError, "'%s' not a Link/Multilink property"%propname + raise TypeError("'%s' not a Link/Multilink " + "property"%propname) # ok, now do the find cldb = self.db.getclassdb(self.classname) @@ -1435,15 +1433,15 @@ class Class(hyperdb.Class): try: for id in self.getnodeids(db=cldb): item = self.db.getnode(self.classname, id, db=cldb) - if item.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in item: continue - for propname, itemids in propspec: + for propname, itemids in propspec.iteritems(): if type(itemids) is not type({}): itemids = {itemids:1} # special case if the item doesn't have this property - if not item.has_key(propname): - if itemids.has_key(None): + if propname not in item: + if None in itemids: l.append(id) break continue @@ -1451,13 +1449,13 @@ class Class(hyperdb.Class): # grab the property definition and its value on this item prop = self.properties[propname] value = item[propname] - if isinstance(prop, hyperdb.Link) and itemids.has_key(value): + if isinstance(prop, hyperdb.Link) and value in itemids: l.append(id) break elif isinstance(prop, hyperdb.Multilink): hit = 0 for v in value: - if itemids.has_key(v): + if v in itemids: l.append(id) hit = 1 break @@ -1475,20 +1473,20 @@ class Class(hyperdb.Class): The return is a list of the id of all nodes that match. 
""" - for propname in requirements.keys(): + for propname in requirements: prop = self.properties[propname] if not isinstance(prop, hyperdb.String): - raise TypeError, "'%s' not a String property"%propname + raise TypeError("'%s' not a String property"%propname) requirements[propname] = requirements[propname].lower() l = [] cldb = self.db.getclassdb(self.classname) try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(self.classname, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue - for key, value in requirements.items(): - if not node.has_key(key): + for key, value in requirements.iteritems(): + if key not in node: break if node[key] is None or node[key].lower() != value: break @@ -1507,7 +1505,7 @@ class Class(hyperdb.Class): try: for nodeid in self.getnodeids(cldb): node = self.db.getnode(cn, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue l.append(nodeid) finally: @@ -1524,20 +1522,20 @@ class Class(hyperdb.Class): res = [] # start off with the new nodes - if self.db.newnodes.has_key(self.classname): - res += self.db.newnodes[self.classname].keys() + if self.classname in self.db.newnodes: + res.extend(self.db.newnodes[self.classname]) must_close = False if db is None: db = self.db.getclassdb(self.classname) must_close = True try: - res = res + db.keys() + res.extend(db) # remove the uncommitted, destroyed nodes - if self.db.destroyednodes.has_key(self.classname): - for nodeid in self.db.destroyednodes[self.classname].keys(): - if db.has_key(nodeid): + if self.classname in self.db.destroyednodes: + for nodeid in self.db.destroyednodes[self.classname]: + if nodeid in db: res.remove(nodeid) # check retired flag @@ -1545,7 +1543,7 @@ class Class(hyperdb.Class): l = [] for nodeid in res: node = self.db.getnode(self.classname, nodeid, db) - is_ret = node.has_key(self.db.RETIRED_FLAG) + is_ret = self.db.RETIRED_FLAG in node if retired == is_ret: l.append(nodeid) res = l @@ -1588,7 +1586,7 @@ class Class(hyperdb.Class): INTERVAL = 'spec:interval' OTHER = 'spec:other' - for k, v in filterspec.items(): + for k, v in filterspec.iteritems(): propclass = props[k] if isinstance(propclass, hyperdb.Link): if type(v) is not type([]): @@ -1662,7 +1660,7 @@ class Class(hyperdb.Class): # TODO: only full-scan once (use items()) for nodeid in self.getnodeids(cldb): node = self.db.getnode(cn, nodeid, cldb) - if node.has_key(self.db.RETIRED_FLAG): + if self.db.RETIRED_FLAG in node: continue # apply filter for t, k, v in filterspec: @@ -1758,7 +1756,7 @@ class Class(hyperdb.Class): try: v = item[prop] except KeyError: - if JPROPS.has_key(prop): + if prop in JPROPS: # force lookup of the special journal prop v = self.get(itemid, prop) else: @@ -1787,7 +1785,7 @@ class Class(hyperdb.Class): key = link.orderprop() child = pt.propdict[key] if key!='id': - if not lcache.has_key(v): + if v not in lcache: # open the link class db if it's not already if lcldb is None: lcldb = self.db.getclassdb(lcn) @@ -1852,15 +1850,15 @@ class Class(hyperdb.Class): may collide with the names of existing properties, or a ValueError is raised before any properties have been added. 
""" - for key in properties.keys(): - if self.properties.has_key(key): - raise ValueError, key + for key in properties: + if key in self.properties: + raise ValueError(key) self.properties.update(properties) def index(self, nodeid): """ Add (or refresh) the node to search indexes """ # find all the String properties that have indexme - for prop, propclass in self.getprops().items(): + for prop, propclass in self.getprops().iteritems(): if isinstance(propclass, hyperdb.String) and propclass.indexme: # index them under (classname, nodeid, property) try: @@ -1907,7 +1905,7 @@ class Class(hyperdb.Class): Return the nodeid of the node imported. """ if self.db.journaltag is None: - raise hyperdb.DatabaseError, _('Database open read-only') + raise hyperdb.DatabaseError(_('Database open read-only')) properties = self.getprops() # make the new node's property map @@ -1967,8 +1965,8 @@ class Class(hyperdb.Class): date = date.get_tuple() if action == 'set': export_data = {} - for propname, value in params.items(): - if not properties.has_key(propname): + for propname, value in params.iteritems(): + if propname not in properties: # property no longer in the schema continue @@ -1988,8 +1986,8 @@ class Class(hyperdb.Class): value = str(value) export_data[propname] = value params = export_data - l = [nodeid, date, user, action, params] - r.append(map(repr, l)) + r.append([repr(nodeid), repr(date), repr(user), + repr(action), repr(params)]) return r def import_journals(self, entries): @@ -1999,11 +1997,10 @@ class Class(hyperdb.Class): properties = self.getprops() d = {} for l in entries: - l = map(eval, l) - nodeid, jdate, user, action, params = l + nodeid, jdate, user, action, params = tuple(map(eval, l)) r = d.setdefault(nodeid, []) if action == 'set': - for propname, value in params.items(): + for propname, value in params.iteritems(): prop = properties[propname] if value is None: pass @@ -2018,7 +2015,7 @@ class Class(hyperdb.Class): params[propname] = value r.append((nodeid, date.Date(jdate), user, action, params)) - for nodeid, l in d.items(): + for nodeid, l in d.iteritems(): self.db.setjournal(self.classname, nodeid, l) class FileClass(hyperdb.FileClass, Class): @@ -2034,9 +2031,9 @@ class FileClass(hyperdb.FileClass, Class): """The newly-created class automatically includes the "content" and "type" properties. """ - if not properties.has_key('content'): + if 'content' not in properties: properties['content'] = hyperdb.String(indexme='yes') - if not properties.has_key('type'): + if 'type' not in properties: properties['type'] = hyperdb.String() Class.__init__(self, db, classname, **properties) @@ -2074,7 +2071,7 @@ class FileClass(hyperdb.FileClass, Class): if propname == 'content': try: return self.db.getfile(self.classname, nodeid, None) - except IOError, (strerror): + except IOError, strerror: # XXX by catching this we don't see an error in the log. 
                 return 'ERROR reading file: %s%s\n%s\n%s'%(
                         self.classname, nodeid, poss_msg, strerror)
@@ -2090,8 +2087,8 @@ class FileClass(hyperdb.FileClass, Class):
 
         # create the oldvalues dict - fill in any missing values
         oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid))
-        for name,prop in self.getprops(protected=0).items():
-            if oldvalues.has_key(name):
+        for name, prop in self.getprops(protected=0).iteritems():
+            if name in oldvalues:
                 continue
             if isinstance(prop, hyperdb.Multilink):
                 oldvalues[name] = []
@@ -2100,7 +2097,7 @@ class FileClass(hyperdb.FileClass, Class):
 
         # now remove the content property so it's not stored in the db
         content = None
-        if propvalues.has_key('content'):
+        if 'content' in propvalues:
             content = propvalues['content']
             del propvalues['content']
 
@@ -2127,7 +2124,7 @@ class FileClass(hyperdb.FileClass, Class):
         Use the content-type property for the content property.
         """
         # find all the String properties that have indexme
-        for prop, propclass in self.getprops().items():
+        for prop, propclass in self.getprops().iteritems():
             if prop == 'content' and propclass.indexme:
                 mime_type = self.get(nodeid, 'type', self.default_mime_type)
                 self.db.indexer.add_text((self.classname, nodeid, 'content'),
@@ -2150,17 +2147,17 @@ class IssueClass(Class, roundupdb.IssueClass):
         dictionary attempts to specify any of these properties or a
         "creation" or "activity" property, a ValueError is raised.
         """
-        if not properties.has_key('title'):
+        if 'title' not in properties:
             properties['title'] = hyperdb.String(indexme='yes')
-        if not properties.has_key('messages'):
+        if 'messages' not in properties:
             properties['messages'] = hyperdb.Multilink("msg")
-        if not properties.has_key('files'):
+        if 'files' not in properties:
             properties['files'] = hyperdb.Multilink("file")
-        if not properties.has_key('nosy'):
+        if 'nosy' not in properties:
             # note: journalling is turned off as it really just wastes
             # space. this behaviour may be overridden in an instance
             properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
-        if not properties.has_key('superseder'):
+        if 'superseder' not in properties:
             properties['superseder'] = hyperdb.Multilink(classname)
         Class.__init__(self, db, classname, **properties)
 
diff --git a/roundup/backends/back_postgresql.py b/roundup/backends/back_postgresql.py
index 9e9d278..e066083 100644
--- a/roundup/backends/back_postgresql.py
+++ b/roundup/backends/back_postgresql.py
@@ -27,9 +27,9 @@ from roundup.backends import sessions_rdbms
 def connection_dict(config, dbnamestr=None):
     ''' read_default_group is MySQL-specific, ignore it '''
     d = rdbms_common.connection_dict(config, dbnamestr)
-    if d.has_key('read_default_group'):
+    if 'read_default_group' in d:
         del d['read_default_group']
-    if d.has_key('read_default_file'):
+    if 'read_default_file' in d:
         del d['read_default_file']
     return d
 
@@ -58,7 +58,7 @@ def db_command(config, command):
     try:
         conn = psycopg.connect(**template1)
     except psycopg.OperationalError, message:
-        raise hyperdb.DatabaseError, message
+        raise hyperdb.DatabaseError(message)
 
     conn.set_isolation_level(0)
     cursor = conn.cursor()
@@ -68,7 +68,7 @@ def db_command(config, command):
                 return
     finally:
         conn.close()
-    raise RuntimeError, '10 attempts to create database failed'
+    raise RuntimeError('10 attempts to create database failed')
 
 def pg_command(cursor, command):
     '''Execute the postgresql command, which may be blocked by some other
@@ -81,7 +81,7 @@ def pg_command(cursor, command):
     except psycopg.ProgrammingError, err:
         response = str(err).split('\n')[0]
         if response.find('FATAL') != -1:
-            raise RuntimeError, response
+            raise RuntimeError(response)
         else:
             msgs = [
                 'is being accessed by other users',
@@ -94,7 +94,7 @@ def pg_command(cursor, command):
             if can_retry:
                 time.sleep(1)
                 return 0
-            raise RuntimeError, response
+            raise RuntimeError(response)
     return 1
 
 def db_exists(config):
@@ -135,7 +135,7 @@ class Database(rdbms_common.Database):
         try:
             conn = psycopg.connect(**db)
         except psycopg.OperationalError, message:
-            raise hyperdb.DatabaseError, message
+            raise hyperdb.DatabaseError(message)
 
         cursor = conn.cursor()
 
@@ -209,7 +209,7 @@ class Database(rdbms_common.Database):
     def add_actor_column(self):
         # update existing tables to have the new actor column
         tables = self.database_schema['tables']
-        for name in tables.keys():
+        for name in tables:
             self.sql('ALTER TABLE _%s add __actor VARCHAR(255)'%name)
 
     def __repr__(self):
@@ -271,7 +271,7 @@ class Database(rdbms_common.Database):
         rdbms_common.Database.clear(self)
 
         # reset the sequences
-        for cn in self.classes.keys():
+        for cn in self.classes:
             self.cursor.execute('DROP SEQUENCE _%s_ids'%cn)
             self.cursor.execute('CREATE SEQUENCE _%s_ids'%cn)
 
diff --git a/roundup/backends/back_sqlite.py b/roundup/backends/back_sqlite.py
index 245240f..c947eb3 100644
--- a/roundup/backends/back_sqlite.py
+++ b/roundup/backends/back_sqlite.py
@@ -160,7 +160,7 @@ class Database(rdbms_common.Database):
         # update existing tables to have the new actor column
         tables = self.database_schema['tables']
         for classname, spec in self.classes.items():
-            if tables.has_key(classname):
+            if classname in tables:
                 dbspec = tables[classname]
                 self.update_class(spec, dbspec, force=1, adding_v2=1)
                 # we've updated - don't try again
@@ -179,7 +179,6 @@ class Database(rdbms_common.Database):
             SQLite doesn't have ALTER TABLE, so we have to copy and
             regenerate the tables with the new schema.
""" - new_has = spec.properties.has_key new_spec = spec.schema() new_spec[1].sort() old_spec[1].sort() @@ -193,14 +192,13 @@ class Database(rdbms_common.Database): old_has = {} for name, prop in old_spec[1]: old_has[name] = 1 - if new_has(name) or not isinstance(prop, hyperdb.Multilink): + if name in spec.properties or not isinstance(prop, hyperdb.Multilink): continue # it's a multilink, and it's been removed - drop the old # table. First drop indexes. self.drop_multilink_table_indexes(spec.classname, name) sql = 'drop table %s_%s'%(spec.classname, prop) self.sql(sql) - old_has = old_has.has_key # now figure how we populate the new table if adding_v2: @@ -211,7 +209,7 @@ class Database(rdbms_common.Database): for propname,x in new_spec[1]: prop = properties[propname] if isinstance(prop, hyperdb.Multilink): - if not old_has(propname): + if propname not in old_has: # we need to create the new table self.create_multilink_table(spec, propname) elif force: @@ -232,7 +230,7 @@ class Database(rdbms_common.Database): (%s, %s)"""%(tn, self.arg, self.arg) for linkid, nodeid in rows: self.sql(sql, (int(linkid), int(nodeid))) - elif old_has(propname): + elif propname in old_has: # we copy this col over from the old table fetch.append('_'+propname) @@ -263,7 +261,7 @@ class Database(rdbms_common.Database): elif isinstance(prop, hyperdb.Interval): inscols.append('_'+propname) inscols.append('__'+propname+'_int__') - elif old_has(propname): + elif propname in old_has: # we copy this col over from the old table inscols.append('_'+propname) @@ -283,7 +281,7 @@ class Database(rdbms_common.Database): v = hyperdb.Interval(entry[name]).as_seconds() except IndexError: v = None - elif entry.has_key(name): + elif name in entry: v = hyperdb.Interval(entry[name]).as_seconds() else: v = None @@ -292,7 +290,7 @@ class Database(rdbms_common.Database): v = entry[name] except IndexError: v = None - elif (sqlite_version == 1 and entry.has_key(name)): + elif (sqlite_version == 1 and name in entry): v = entry[name] else: v = None @@ -397,8 +395,8 @@ class sqliteClass: """ If there's NO matches to a fetch, sqlite returns NULL instead of nothing """ - return filter(None, rdbms_common.Class.filter(self, search_matches, - filterspec, sort=sort, group=group)) + return [f for f in rdbms_common.Class.filter(self, search_matches, + filterspec, sort=sort, group=group) if f] class Class(sqliteClass, rdbms_common.Class): pass diff --git a/roundup/backends/indexer_common.py b/roundup/backends/indexer_common.py index e5429d3..9343127 100644 --- a/roundup/backends/indexer_common.py +++ b/roundup/backends/indexer_common.py @@ -43,7 +43,7 @@ class Indexer: return {} designator_propname = {} - for nm, propclass in klass.getprops().items(): + for nm, propclass in klass.getprops().iteritems(): if _isLink(propclass): designator_propname.setdefault(propclass.classname, []).append(nm) @@ -52,7 +52,7 @@ class Indexer: # and files nodeids = {} # this is the answer propspec = {} # used to do the klass.find - for l in designator_propname.values(): + for l in designator_propname.itervalues(): for propname in l: propspec[propname] = {} # used as a set (value doesn't matter) @@ -61,7 +61,7 @@ class Indexer: # skip this result if we don't care about this class/property classname = entry[0] property = entry[2] - if ignore.has_key((classname, property)): + if (classname, property) in ignore: continue # if it's a property on klass, it's easy @@ -69,12 +69,12 @@ class Indexer: # backends as that can cause problems down the track) nodeid = str(entry[1]) if 
             if classname == klass.classname:
-                if not nodeids.has_key(nodeid):
+                if nodeid not in nodeids:
                     nodeids[nodeid] = {}
                 continue
 
             # make sure the class is a linked one, otherwise ignore
-            if not designator_propname.has_key(classname):
+            if classname not in designator_propname:
                 continue
 
             # it's a linked class - set up to do the klass.find
@@ -82,7 +82,7 @@ class Indexer:
             propspec[linkprop][nodeid] = 1
 
         # retain only the meaningful entries
-        for propname, idset in propspec.items():
+        for propname, idset in propspec.iteritems():
             if not idset:
                 del propspec[propname]
 
@@ -95,16 +95,16 @@ class Indexer:
             nodeids[resid] = {}
             node_dict = nodeids[resid]
             # now figure out where it came from
-            for linkprop in propspec.keys():
+            for linkprop in propspec:
                 v = klass.get(resid, linkprop)
                 # the link might be a Link so deal with a single result or None
                 if isinstance(propdefs[linkprop], hyperdb.Link):
                     if v is None: continue
                     v = [v]
                 for nodeid in v:
-                    if propspec[linkprop].has_key(nodeid):
+                    if nodeid in propspec[linkprop]:
                         # OK, this node[propname] has a winner
-                        if not node_dict.has_key(linkprop):
+                        if linkprop not in node_dict:
                             node_dict[linkprop] = [nodeid]
                         else:
                             node_dict[linkprop].append(nodeid)
diff --git a/roundup/backends/indexer_dbm.py b/roundup/backends/indexer_dbm.py
index 5b166b5..0b8562c 100644
--- a/roundup/backends/indexer_dbm.py
+++ b/roundup/backends/indexer_dbm.py
@@ -81,7 +81,7 @@ class Indexer(IndexerBase):
         self.load_index()
 
         # remove old entries for this identifier
-        if self.files.has_key(identifier):
+        if identifier in self.files:
             self.purge_entry(identifier)
 
         # split into words
@@ -99,15 +99,15 @@ class Indexer(IndexerBase):
         for word in words:
             if self.is_stopword(word):
                 continue
-            if filedict.has_key(word):
+            if word in filedict:
                 filedict[word] = filedict[word]+1
             else:
                 filedict[word] = 1
 
         # now add to the totals
-        for word in filedict.keys():
+        for word in filedict:
             # each word has a dict of {identifier: count}
-            if self.words.has_key(word):
+            if word in self.words:
                 entry = self.words[word]
             else:
                 # new word
@@ -162,18 +162,18 @@ class Indexer(IndexerBase):
                 return {}
             if hits is None:
                 hits = {}
-                for k in entry.keys():
-                    if not self.fileids.has_key(k):
-                        raise ValueError, 'Index is corrupted: re-generate it'
+                for k in entry:
+                    if k not in self.fileids:
+                        raise ValueError('Index is corrupted: re-generate it')
                     hits[k] = self.fileids[k]
             else:
                 # Eliminate hits for every non-match
-                for fileid in hits.keys():
-                    if not entry.has_key(fileid):
+                for fileid in hits:
+                    if fileid not in entry:
                         del hits[fileid]
         if hits is None:
             return {}
-        return hits.values()
+        return list(hits.values())
 
     segments = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#_-!"
     def load_index(self, reload=0, wordlist=None):
@@ -205,7 +205,7 @@ class Indexer(IndexerBase):
                 dbslice = marshal.loads(pickle_str)
                 if dbslice.get('WORDS'):
                     # if it has some words, add them
-                    for word, entry in dbslice['WORDS'].items():
+                    for word, entry in dbslice['WORDS'].iteritems():
                         db['WORDS'][word] = entry
                 if dbslice.get('FILES'):
                     # if it has some files, add them
@@ -241,7 +241,7 @@ class Indexer(IndexerBase):
         segdicts = {} # Need batch of empty dicts
         for segment in letters:
             segdicts[segment] = {}
-        for word, entry in self.words.items(): # Split into segment dicts
+        for word, entry in self.words.iteritems(): # Split into segment dicts
             initchar = word[0].upper()
             segdicts[initchar][word] = entry
 
@@ -262,7 +262,7 @@ class Indexer(IndexerBase):
         '''
         self.load_index()
 
-        if not self.files.has_key(identifier):
+        if identifier not in self.files:
             return
 
         file_index = self.files[identifier][0]
@@ -270,8 +270,8 @@ class Indexer(IndexerBase):
         del self.fileids[file_index]
 
         # The much harder part, cleanup the word index
-        for key, occurs in self.words.items():
-            if occurs.has_key(file_index):
+        for key, occurs in self.words.iteritems():
+            if file_index in occurs:
                 del occurs[file_index]
 
         # save needed
diff --git a/roundup/backends/indexer_rdbms.py b/roundup/backends/indexer_rdbms.py
index 70e522b..a5886ff 100644
--- a/roundup/backends/indexer_rdbms.py
+++ b/roundup/backends/indexer_rdbms.py
@@ -129,7 +129,7 @@ class Indexer(IndexerBase):
         sql = sql%(' '.join(join_list), self.db.arg, ' '.join(match_list))
         self.db.cursor.execute(sql, l)
 
-        r = map(lambda x: x[0], self.db.cursor.fetchall())
+        r = [x[0] for x in self.db.cursor.fetchall()]
         if not r:
             return []
 
diff --git a/roundup/backends/locking.py b/roundup/backends/locking.py
index 4fe955f..5702609 100644
--- a/roundup/backends/locking.py
+++ b/roundup/backends/locking.py
@@ -28,12 +28,11 @@ It should work on Unix and Windows.
 '''
 __docformat__ = 'restructuredtext'
 
-import portalocker
+from roundup.backends import portalocker
 
 def acquire_lock(path, block=1):
     '''Acquire a lock for the given path
     '''
-    import portalocker
     file = open(path, 'w')
     if block:
         portalocker.lock(file, portalocker.LOCK_EX)
diff --git a/roundup/backends/portalocker.py b/roundup/backends/portalocker.py
index 30957c0..18b2748 100644
--- a/roundup/backends/portalocker.py
+++ b/roundup/backends/portalocker.py
@@ -136,10 +136,9 @@ elif os.name =='posix':
 if __name__ == '__main__':
     from time import time, strftime, localtime
     import sys
-    import portalocker
 
     log = open('log.txt', "a+")
-    portalocker.lock(log, portalocker.LOCK_EX)
+    lock(log, LOCK_EX)
     timestamp = strftime("%m/%d/%Y %H:%M:%S\n", localtime(time()))
     log.write( timestamp )