author     richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
           Mon, 22 Feb 2010 05:26:57 +0000 (05:26 +0000)
committer  richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
           Mon, 22 Feb 2010 05:26:57 +0000 (05:26 +0000)
parent     a845348
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk@4463 57a73879-2fb5-44c3-a270-3262357dd7e2
16 files changed:
diff --git a/2to3-done.txt b/2to3-done.txt
--- /dev/null
+++ b/2to3-done.txt
@@ -0,0 +1,156 @@
+CAN'T VERIFY
+
+./roundup/backends/back_mysql.py
+./roundup/backends/back_tsearch2.py
+./roundup/backends/indexer_xapian.py
+
+TODO
+
+./roundup/backends/rdbms_common.py
+./roundup/backends/sessions_dbm.py
+./roundup/backends/sessions_rdbms.py
+./roundup/backends/tsearch2_setup.py
+./roundup/cgi/__init__.py
+./roundup/cgi/accept_language.py
+./roundup/cgi/actions.py
+./roundup/cgi/apache.py
+./roundup/cgi/cgitb.py
+./roundup/cgi/client.py
+./roundup/cgi/exceptions.py
+./roundup/cgi/form_parser.py
+./roundup/cgi/MultiMapping.py
+./roundup/cgi/PageTemplates/__init__.py
+./roundup/cgi/PageTemplates/Expressions.py
+./roundup/cgi/PageTemplates/GlobalTranslationService.py
+./roundup/cgi/PageTemplates/MultiMapping.py
+./roundup/cgi/PageTemplates/PageTemplate.py
+./roundup/cgi/PageTemplates/PathIterator.py
+./roundup/cgi/PageTemplates/PythonExpr.py
+./roundup/cgi/PageTemplates/TALES.py
+./roundup/cgi/TAL/__init__.py
+./roundup/cgi/TAL/DummyEngine.py
+./roundup/cgi/TAL/HTMLParser.py
+./roundup/cgi/TAL/HTMLTALParser.py
+./roundup/cgi/TAL/markupbase.py
+./roundup/cgi/TAL/TALDefs.py
+./roundup/cgi/TAL/TALGenerator.py
+./roundup/cgi/TAL/talgettext.py
+./roundup/cgi/TAL/TALInterpreter.py
+./roundup/cgi/TAL/TALParser.py
+./roundup/cgi/TAL/TranslationContext.py
+./roundup/cgi/TAL/XMLParser.py
+./roundup/cgi/templating.py
+./roundup/cgi/TranslationService.py
+./roundup/cgi/wsgi_handler.py
+./roundup/cgi/zLOG.py
+./roundup/cgi/ZTUtils/__init__.py
+./roundup/cgi/ZTUtils/Batch.py
+./roundup/cgi/ZTUtils/Iterator.py
+./roundup/configuration.py
+./roundup/date.py
+./roundup/dist/__init__.py
+./roundup/dist/command/__init__.py
+./roundup/dist/command/bdist_rpm.py
+./roundup/dist/command/build.py
+./roundup/dist/command/build_doc.py
+./roundup/dist/command/build_py.py
+./roundup/dist/command/build_scripts.py
+./roundup/exceptions.py
+./roundup/hyperdb.py
+./roundup/i18n.py
+./roundup/init.py
+./roundup/install_util.py
+./roundup/instance.py
+./roundup/mailer.py
+./roundup/mailgw.py
+./roundup/msgfmt.py
+./roundup/password.py
+./roundup/rfc2822.py
+./roundup/roundupdb.py
+./roundup/scripts/__init__.py
+./roundup/scripts/roundup_admin.py
+./roundup/scripts/roundup_demo.py
+./roundup/scripts/roundup_gettext.py
+./roundup/scripts/roundup_mailgw.py
+./roundup/scripts/roundup_server.py
+./roundup/scripts/roundup_xmlrpc_server.py
+./roundup/security.py
+./roundup/support.py
+./roundup/token.py
+./roundup/version_check.py
+./roundup/xmlrpc.py
+./run_tests.py
+./scripts/copy-user.py
+./scripts/imapServer.py
+./scripts/import_sf.py
+./scripts/schema_diagram.py
+./setup.py
+./share/roundup/templates/classic/detectors/messagesummary.py
+./share/roundup/templates/classic/detectors/nosyreaction.py
+./share/roundup/templates/classic/detectors/statusauditor.py
+./share/roundup/templates/classic/detectors/userauditor.py
+./share/roundup/templates/classic/initial_data.py
+./share/roundup/templates/classic/schema.py
+./share/roundup/templates/minimal/detectors/userauditor.py
+./share/roundup/templates/minimal/initial_data.py
+./share/roundup/templates/minimal/schema.py
+./test/__init__.py
+./test/benchmark.py
+./test/db_test_base.py
+./test/memorydb.py
+./test/mocknull.py
+./test/session_common.py
+./test/test_actions.py
+./test/test_anydbm.py
+./test/test_anypy_hashlib.py
+./test/test_cgi.py
+./test/test_dates.py
+./test/test_hyperdbvals.py
+./test/test_indexer.py
+./test/test_locking.py
+./test/test_mailer.py
+./test/test_mailgw.py
+./test/test_mailsplit.py
+./test/test_memorydb.py
+./test/test_multipart.py
+./test/test_mysql.py
+./test/test_postgresql.py
+./test/test_rfc2822.py
+./test/test_schema.py
+./test/test_security.py
+./test/test_sqlite.py
+./test/test_templating.py
+./test/test_textfmt.py
+./test/test_token.py
+./test/test_tsearch2.py
+./test/test_userauditor.py
+./test/test_xmlrpc.py
+./test.py
+./tools/fixroles.py
+./tools/load_tracker.py
+./tools/migrate-queries.py
+./tools/pygettext.py
+./frontends/ZRoundup/__init__.py
+./frontends/ZRoundup/ZRoundup.py
+
+
+
+DONE
+
+./doc/conf.py
+./roundup/__init__.py
+./roundup/admin.py
+./roundup/actions.py
+./roundup/anypy/__init__.py
+./roundup/anypy/hashlib_.py
+./roundup/anypy/sets_.py
+./roundup/backends/__init__.py
+./roundup/backends/back_anydbm.py
+./roundup/backends/back_postgresql.py
+./roundup/backends/back_sqlite.py
+./roundup/backends/blobfiles.py
+./roundup/backends/indexer_common.py
+./roundup/backends/indexer_dbm.py
+./roundup/backends/indexer_rdbms.py
+./roundup/backends/locking.py
+./roundup/backends/portalocker.py
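
The new 2to3-done.txt tracks which modules have already been rewritten in the 2-and-3 compatible style used throughout this commit. As a rough, hypothetical sketch (not part of this commit, and the helper name is made up), lib2to3 can report whether its fixers would still rewrite a given file; note that it also flags constructs such as print statements that this code base keeps on purpose for Python 2:

    # Hypothetical helper: asks lib2to3 which fixers would still touch a file.
    from lib2to3 import refactor

    def needs_2to3(path):
        fixers = refactor.get_fixers_from_package('lib2to3.fixes')
        tool = refactor.RefactoringTool(fixers)
        source = open(path).read()
        if not source.endswith('\n'):
            source += '\n'              # lib2to3 requires a trailing newline
        tree = tool.refactor_string(source, path)
        # None means the file did not parse; otherwise compare refactored output
        return tree is not None and str(tree) != source

    print needs_2to3('roundup/actions.py')
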
diff --git a/CHANGES.txt b/CHANGES.txt
index 68d8b7b2cacc73d455eaa56b5dee53a2ec9fb2e8..3e8dfd9f1a30f934fbe5c451fa3eace7ca8f1f94 100644 (file)
--- a/CHANGES.txt
+++ b/CHANGES.txt
This file contains the changes to the Roundup system over time. The entries
are given with the most recent entry first.
-2010-02-?? 1.4.13
+2010-02-19 1.4.13
Fixed:
- Multilink edit fields lose their values (thanks Will Maier)
diff --git a/doc/announcement.txt b/doc/announcement.txt
index 51de8d857e7187c451344130dfe4ce67bd8f045a..70a6e9628282c5c82f4534fc6f917f66478ed238 100644 (file)
--- a/doc/announcement.txt
+++ b/doc/announcement.txt
-I'm proud to release version 1.4.12 of Roundup which fixes a number bugs.
+I'm proud to release version 1.4.13 of Roundup which includes a single
+fix for a regression introduced in 1.4.12:
+
+- Multilink edit fields lose their values (thanks Will Maier)
If you're upgrading from an older version of Roundup you *must* follow
the "Software Upgrade" guidelines given in the maintenance documentation.
-This release includes:
-
-- Support IMAP CRAM-MD5, thanks Jochen Maes
-- Proper handling of 'Create' permissions in both mail gateway (earlier
- commit r4405 by Richard), web interface, and xmlrpc. This used to
- check 'Edit' permission previously. See
- http://thread.gmane.org/gmane.comp.bug-tracking.roundup.devel/5133
- Add regression tests for proper handling of 'Create' and 'Edit'
- permissions.
-- Fix handling of non-ascii in realname in the nosy mailer, this used to
- mangle the email address making it unusable when replying. Thanks to
- intevation for funding the fix.
-- Fix documentation on user required to run the tests, fixes
- issue2550618, thanks to Chris aka 'radioking'
-- Add simple doc about translating customised tracker content
-- Add "flup" setup documentation, thanks Christian Glass
-- Fix "Web Access" permission check to allow serving of static files to
- Anonymous again
-- Add check for "Web Access" permission in all web templating permission
- checks
-- Improvements in upgrading documentation, thanks Christian Glass
-- Display 'today' in the account user's timezone, thanks David Wolever
-- Fix file handle leak in some web interfaces with logging turned on,
- fixes issue1675845
-- Attempt to generate more human-readable addresses in email, fixes
- issue2550632
-- Allow value to be specified to multilink form element templating, fixes
- issue2550613, thanks David Wolever
-- Fix thread safety with stdin in roundup-server, fixes issue2550596
- (thanks Werner Hunger)
-
Roundup requires python 2.3 or later (but not 3+) for correct operation.
To give Roundup a try, just download (see below), unpack and run::
diff --git a/doc/conf.py b/doc/conf.py
index 9837f89345e0531ce3bc0e5fe8bfedb0bc7ba240..3276c57b4880c8fe865d6b7be94d3340a16fac6c 100644 (file)
--- a/doc/conf.py
+++ b/doc/conf.py
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
- ('docs/index', 'Roundup.tex', ur'Roundup Documentation',
- ur'Richard Jones', 'manual'),
+ ('docs/index', 'Roundup.tex', u'Roundup Documentation',
+ u'Richard Jones', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
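
The doc/conf.py hunk drops the raw-string flag from the ur'' literals: the combined ur prefix is a syntax error under the Python 3 parser, and since these strings contain no backslash escapes, removing the raw flag does not change their value. A trivial illustration (runs on Python 2 only because of the ur literal):

    # Illustrative only: with no backslash escapes the raw flag is redundant.
    assert ur'Roundup Documentation' == u'Roundup Documentation'
    # ur'...' itself is rejected outright by the Python 3 parser.
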
diff --git a/roundup/__init__.py b/roundup/__init__.py
index 3ef19197686dd4f2985c23742602abf34069ef3c..f646c11e1fa06e9d75249083a2b9c9f398096d58 100644 (file)
--- a/roundup/__init__.py
+++ b/roundup/__init__.py
'''
__docformat__ = 'restructuredtext'
-__version__ = '1.4.12'
+__version__ = '1.5.0'
# vim: set filetype=python ts=4 sw=4 et si
diff --git a/roundup/actions.py b/roundup/actions.py
index ea1a15ee78f3122c4a3d2cc8bacfb36194c72a3a..e3bbec5622764f21b8f1b249a9f024dc4821eac2 100644 (file)
--- a/roundup/actions.py
+++ b/roundup/actions.py
# make sure we don't try to retire admin or anonymous
if (classname == 'user' and
self.db.user.get(itemid, 'username') in ('admin', 'anonymous')):
- raise ValueError, self._(
- 'You may not retire the admin or anonymous user')
+ raise ValueError(self._(
+ 'You may not retire the admin or anonymous user'))
# do the retire
self.db.getclass(classname).retire(itemid)
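
The actions.py change shows the pattern repeated through most of this commit: the Python-2-only statement form "raise Exception, value" becomes an ordinary call, which both interpreters accept. A condensed, hypothetical version of the guard above:

    def retire_guard(username):
        # old form (Python 2 only):
        #     raise ValueError, 'You may not retire the admin or anonymous user'
        # call form (Python 2 and 3):
        if username in ('admin', 'anonymous'):
            raise ValueError('You may not retire the admin or anonymous user')
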
diff --git a/roundup/admin.py b/roundup/admin.py
index a8d75d1f7734fa84b368b5db0c6137ec302d55a9..1b583e3196afa767ca89344abbc6a7e0733b28f3 100644 (file)
--- a/roundup/admin.py
+++ b/roundup/admin.py
"""
__docformat__ = 'restructuredtext'
-import csv, getopt, getpass, os, re, shutil, sys, UserDict
+import csv, getopt, getpass, os, re, shutil, sys, UserDict, operator
from roundup import date, hyperdb, roundupdb, init, password, token
from roundup import __version__ as roundup_version
"""
_marker = []
def get(self, key, default=_marker):
- if self.data.has_key(key):
+ if key in self.data:
return [(key, self.data[key])]
- keylist = self.data.keys()
- keylist.sort()
+ keylist = sorted(self.data)
l = []
for ki in keylist:
if ki.startswith(key):
l.append((ki, self.data[ki]))
if not l and default is self._marker:
- raise KeyError, key
+ raise KeyError(key)
return l
class AdminTool:
"""
def __init__(self):
self.commands = CommandDict()
- for k in AdminTool.__dict__.keys():
+ for k in AdminTool.__dict__:
if k[:3] == 'do_':
self.commands[k[3:]] = getattr(self, k)
self.help = {}
- for k in AdminTool.__dict__.keys():
+ for k in AdminTool.__dict__:
if k[:5] == 'help_':
self.help[k[5:]] = getattr(self, k)
self.tracker_home = ''
try:
return self.db.getclass(classname)
except KeyError:
- raise UsageError, _('no such class "%(classname)s"')%locals()
+ raise UsageError(_('no such class "%(classname)s"')%locals())
def props_from_args(self, args):
""" Produce a dictionary of prop: value from the args list.
props = {}
for arg in args:
if arg.find('=') == -1:
- raise UsageError, _('argument "%(arg)s" not propname=value'
- )%locals()
+ raise UsageError(_('argument "%(arg)s" not propname=value'
+ )%locals())
l = arg.split('=')
if len(l) < 2:
- raise UsageError, _('argument "%(arg)s" not propname=value'
- )%locals()
+ raise UsageError(_('argument "%(arg)s" not propname=value'
+ )%locals())
key, value = l[0], '='.join(l[1:])
if value:
props[key] = value
"""
print _('Commands:'),
commands = ['']
- for command in self.commands.values():
+ for command in self.commands.itervalues():
h = _(command.__doc__).split('\n')[0]
commands.append(' '+h[7:])
commands.sort()
def help_commands_html(self, indent_re=re.compile(r'^(\s+)\S+')):
""" Produce an HTML command list.
"""
- commands = self.commands.values()
- def sortfun(a, b):
- return cmp(a.__name__, b.__name__)
- commands.sort(sortfun)
+ commands = sorted(self.commands.itervalues(),
+ operator.attrgetter('__name__'))
for command in commands:
h = _(command.__doc__).split('\n')
name = command.__name__[3:]
# try help_ methods
- if self.help.has_key(topic):
+ if topic in self.help:
self.help[topic]()
return 0
def help_initopts(self):
templates = self.listTemplates()
- print _('Templates:'), ', '.join(templates.keys())
+ print _('Templates:'), ', '.join(templates)
import roundup.backends
backends = roundup.backends.list_backends()
print _('Back ends:'), ', '.join(backends)
See also initopts help.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
# make sure the tracker home can be created
tracker_home = os.path.abspath(tracker_home)
parent = os.path.split(tracker_home)[0]
if not os.path.exists(parent):
- raise UsageError, _('Instance home parent directory "%(parent)s"'
- ' does not exist')%locals()
+ raise UsageError(_('Instance home parent directory "%(parent)s"'
+ ' does not exist')%locals())
config_ini_file = os.path.join(tracker_home, CoreConfig.INI_FILE)
# check for both old- and new-style configs
- if filter(os.path.exists, [config_ini_file,
- os.path.join(tracker_home, 'config.py')]):
+ if list(filter(os.path.exists, [config_ini_file,
+ os.path.join(tracker_home, 'config.py')])):
ok = raw_input(_(
"""WARNING: There appears to be a tracker in "%(tracker_home)s"!
If you re-install it, you will lose all the data!
# select template
templates = self.listTemplates()
template = len(args) > 1 and args[1] or ''
- if not templates.has_key(template):
- print _('Templates:'), ', '.join(templates.keys())
- while not templates.has_key(template):
+ if template not in templates:
+ print _('Templates:'), ', '.join(templates)
+ while template not in templates:
template = raw_input(_('Select template [classic]: ')).strip()
if not template:
template = 'classic'
need_set = CoreConfig(tracker_home)._get_unset_options()
if need_set:
print _(" ... at a minimum, you must set following options:")
- for section, options in need_set.items():
- print " [%s]: %s" % (section, ", ".join(options))
+ for section in need_set:
+ print " [%s]: %s" % (section, ", ".join(need_set[section]))
# note about schema modifications
print _("""
in <filename>.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
config = CoreConfig()
config.save(args[0])
# make sure the tracker home is installed
if not os.path.exists(tracker_home):
- raise UsageError, _('Instance home does not exist')%locals()
+ raise UsageError(_('Instance home does not exist')%locals())
try:
tracker = roundup.instance.open(tracker_home)
except roundup.instance.TrackerError:
- raise UsageError, _('Instance has not been installed')%locals()
+ raise UsageError(_('Instance has not been installed')%locals())
# is there already a database?
if tracker.exists():
by the designators.
"""
if len(args) < 2:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
propname = args[0]
designators = args[1].split(',')
l = []
try:
classname, nodeid = hyperdb.splitDesignator(designator)
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
# get the class
cl = self.get_class(classname)
property = properties[propname]
if not (isinstance(property, hyperdb.Multilink) or
isinstance(property, hyperdb.Link)):
- raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname
+ raise UsageError(_('property %s is not of type'
+ ' Multilink or Link so -d flag does not '
+ 'apply.')%propname)
propclassname = self.db.getclass(property.classname).classname
id = cl.get(nodeid, propname)
for i in id:
property = properties[propname]
if not (isinstance(property, hyperdb.Multilink) or
isinstance(property, hyperdb.Link)):
- raise UsageError, _('property %s is not of type Multilink or Link so -d flag does not apply.')%propname
+ raise UsageError(_('property %s is not of type'
+ ' Multilink or Link so -d flag does not '
+ 'apply.')%propname)
propclassname = self.db.getclass(property.classname).classname
id = cl.get(nodeid, propname)
for i in id:
else:
print cl.get(nodeid, propname)
except IndexError:
- raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+ raise UsageError(_('no such %(classname)s node '
+ '"%(nodeid)s"')%locals())
except KeyError:
- raise UsageError, _('no such %(classname)s property '
- '"%(propname)s"')%locals()
+ raise UsageError(_('no such %(classname)s property '
+ '"%(propname)s"')%locals())
if self.separator:
print self.separator.join(l)
ids for the multilink as comma-separated numbers (ie "1,2,3").
"""
if len(args) < 2:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
from roundup import hyperdb
designators = args[0].split(',')
try:
designators = [hyperdb.splitDesignator(x) for x in designators]
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
# get the props from the args
props = self.props_from_args(args[1:])
props[key] = hyperdb.rawToHyperdb(self.db, cl, itemid,
key, value)
except hyperdb.HyperdbValueError, message:
- raise UsageError, message
+ raise UsageError(message)
# try the set
try:
- apply(cl.set, (itemid, ), props)
+ cl.set(itemid, **props)
except (TypeError, IndexError, ValueError), message:
import traceback; traceback.print_exc()
- raise UsageError, message
+ raise UsageError(message)
self.db_uncommitted = True
return 0
value.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
classname = args[0]
# get the class
cl = self.get_class(classname)
props = self.props_from_args(args[1:])
# convert the user-input value to a value used for find()
- for propname, value in props.items():
+ for propname, value in props.iteritems():
if ',' in value:
values = value.split(',')
else:
designator = []
if self.separator:
if self.print_designator:
- id=apply(cl.find, (), props)
+ id = cl.find(**props)
for i in id:
designator.append(classname + i)
print self.separator.join(designator)
else:
- print self.separator.join(apply(cl.find, (), props))
+ print self.separator.join(cl.find(**props))
else:
if self.print_designator:
- id=apply(cl.find, (), props)
+ id = cl.find(**props)
for i in id:
designator.append(classname + i)
print designator
else:
- print apply(cl.find, (), props)
+ print cl.find(**props)
except KeyError:
- raise UsageError, _('%(classname)s has no property '
- '"%(propname)s"')%locals()
+ raise UsageError(_('%(classname)s has no property '
+ '"%(propname)s"')%locals())
except (ValueError, TypeError), message:
- raise UsageError, message
+ raise UsageError(message)
return 0
def do_specification(self, args):
This lists the properties for a given class.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
classname = args[0]
# get the class
cl = self.get_class(classname)
# get the key property
keyprop = cl.getkey()
- for key, value in cl.properties.items():
+ for key in cl.properties:
+ value = cl.properties[key]
if keyprop == key:
print _('%(key)s: %(value)s (key property)')%locals()
else:
node.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
# decode the node designator
for designator in args[0].split(','):
try:
classname, nodeid = hyperdb.splitDesignator(designator)
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
# get the class
cl = self.get_class(classname)
# display the values
- keys = cl.properties.keys()
- keys.sort()
+ keys = sorted(cl.properties)
for key in keys:
value = cl.get(nodeid, key)
print _('%(key)s: %(value)s')%locals()
command.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
from roundup import hyperdb
classname = args[0]
properties = cl.getprops(protected = 0)
if len(args) == 1:
# ask for the properties
- for key, value in properties.items():
+ for key in properties:
if key == 'id': continue
+ value = properties[key]
name = value.__class__.__name__
if isinstance(value , hyperdb.Password):
again = None
props = self.props_from_args(args[1:])
# convert types
- for propname, value in props.items():
+ for propname in props:
try:
props[propname] = hyperdb.rawToHyperdb(self.db, cl, None,
- propname, value)
+ propname, props[propname])
except hyperdb.HyperdbValueError, message:
- raise UsageError, message
+ raise UsageError(message)
# check for the key property
propname = cl.getkey()
- if propname and not props.has_key(propname):
- raise UsageError, _('you must provide the "%(propname)s" '
- 'property.')%locals()
+ if propname and propname not in props:
+ raise UsageError(_('you must provide the "%(propname)s" '
+ 'property.')%locals())
# do the actual create
try:
- print apply(cl.create, (), props)
+ print cl.create(**props)
except (TypeError, IndexError, ValueError), message:
- raise UsageError, message
+ raise UsageError(message)
self.db_uncommitted = True
return 0
for every class instance.
"""
if len(args) > 2:
- raise UsageError, _('Too many arguments supplied')
+ raise UsageError(_('Too many arguments supplied'))
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
classname = args[0]
# get the class
try:
proplist.append(cl.get(nodeid, propname))
except KeyError:
- raise UsageError, _('%(classname)s has no property '
- '"%(propname)s"')%locals()
+ raise UsageError(_('%(classname)s has no property '
+ '"%(propname)s"')%locals())
print self.separator.join(proplist)
else:
# create a list of index id's since user didn't specify
try:
value = cl.get(nodeid, propname)
except KeyError:
- raise UsageError, _('%(classname)s has no property '
- '"%(propname)s"')%locals()
+ raise UsageError(_('%(classname)s has no property '
+ '"%(propname)s"')%locals())
print _('%(nodeid)4s: %(value)s')%locals()
return 0
will result in a the 4 character wide "Name" column.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
classname = args[0]
# get the class
try:
propname, width = spec.split(':')
except (ValueError, TypeError):
- raise UsageError, _('"%(spec)s" not name:width')%locals()
+ raise UsageError(_('"%(spec)s" not '
+ 'name:width')%locals())
else:
propname = spec
- if not all_props.has_key(propname):
- raise UsageError, _('%(classname)s has no property '
- '"%(propname)s"')%locals()
+ if propname not in all_props:
+ raise UsageError(_('%(classname)s has no property '
+ '"%(propname)s"')%locals())
else:
- prop_names = cl.getprops().keys()
+ prop_names = cl.getprops()
# now figure column widths
props = []
Lists the journal entries for the node identified by the designator.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
try:
classname, nodeid = hyperdb.splitDesignator(args[0])
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
try:
print self.db.getclass(classname).history(nodeid)
except KeyError:
- raise UsageError, _('no such class "%(classname)s"')%locals()
+ raise UsageError(_('no such class "%(classname)s"')%locals())
except IndexError:
- raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+ raise UsageError(_('no such %(classname)s node '
+ '"%(nodeid)s"')%locals())
return 0
def do_commit(self, args):
by the list or find commands, and its key value may be re-used.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
designators = args[0].split(',')
for designator in designators:
try:
classname, nodeid = hyperdb.splitDesignator(designator)
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
try:
self.db.getclass(classname).retire(nodeid)
except KeyError:
- raise UsageError, _('no such class "%(classname)s"')%locals()
+ raise UsageError(_('no such class "%(classname)s"')%locals())
except IndexError:
- raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+ raise UsageError(_('no such %(classname)s node '
+ '"%(nodeid)s"')%locals())
self.db_uncommitted = True
return 0
The given nodes will become available for users again.
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
designators = args[0].split(',')
for designator in designators:
try:
classname, nodeid = hyperdb.splitDesignator(designator)
except hyperdb.DesignatorError, message:
- raise UsageError, message
+ raise UsageError(message)
try:
self.db.getclass(classname).restore(nodeid)
except KeyError:
- raise UsageError, _('no such class "%(classname)s"')%locals()
+ raise UsageError(_('no such class "%(classname)s"')%locals())
except IndexError:
- raise UsageError, _('no such %(classname)s node "%(nodeid)s"')%locals()
+ raise UsageError(_('no such %(classname)s node '
+ '"%(nodeid)s"')%locals())
self.db_uncommitted = True
return 0
"""
# grab the directory to export to
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
dir = args[-1]
# get the list of classes to export
if len(args) == 2:
if args[0].startswith('-'):
- classes = [ c for c in self.db.classes.keys()
+ classes = [ c for c in self.db.classes
if not c in args[0][1:].split(',') ]
else:
classes = args[0].split(',')
else:
- classes = self.db.classes.keys()
+ classes = self.db.classes
class colon_separated(csv.excel):
delimiter = ':'
sys.stdout.write("\nExporting Journal for %s\n" % classname)
sys.stdout.flush()
journals = csv.writer(jf, colon_separated)
- map(journals.writerow, cl.export_journals())
+ for row in cl.export_journals():
+ journals.writerow(row)
jf.close()
if max_len > self.db.config.CSV_FIELD_SIZE:
print >> sys.stderr, \
database (or, tediously, retire all the old data.)
"""
if len(args) < 1:
- raise UsageError, _('Not enough arguments supplied')
+ raise UsageError(_('Not enough arguments supplied'))
from roundup import hyperdb
if hasattr (csv, 'field_size_limit'):
if hasattr(cl, 'import_files'):
cl.import_files(dir, nodeid)
maxid = max(maxid, int(nodeid))
+
+ # (print to sys.stdout here to allow tests to squash it .. ugh)
print >> sys.stdout
+
f.close()
# import the journals
cl.import_journals(reader)
f.close()
- # set the id counter
+ # (print to sys.stdout here to allow tests to squash it .. ugh)
print >> sys.stdout, 'setting', classname, maxid+1
+
+ # set the id counter
self.db.setid(classname, str(maxid+1))
self.db_uncommitted = True
2001-01-01
"""
- if len(args) <> 1:
- raise UsageError, _('Not enough arguments supplied')
+ if len(args) != 1:
+ raise UsageError(_('Not enough arguments supplied'))
# are we dealing with a period or a date
value = args[0]
""", re.VERBOSE)
m = date_re.match(value)
if not m:
- raise ValueError, _('Invalid format')
+ raise ValueError(_('Invalid format'))
m = m.groupdict()
if m['period']:
pack_before = date.Date(". - %s"%value)
try:
cl.index(m.group(2))
except IndexError:
- raise UsageError, _('no such item "%(designator)s"')%{
- 'designator': arg}
+ raise UsageError(_('no such item "%(designator)s"')%{
+ 'designator': arg})
else:
cl = self.get_class(arg)
self.db.reindex(arg)
print _('No such Role "%(role)s"')%locals()
return 1
else:
- roles = self.db.security.role.items()
+ roles = list(self.db.security.role.items())
role = self.db.config.NEW_WEB_USER_ROLES
if ',' in role:
print _('New Web users get the Roles "%(role)s"')%locals()
self.tracker_home = os.environ.get('TRACKER_HOME', '')
# TODO: reinstate the user/password stuff (-u arg too)
name = password = ''
- if os.environ.has_key('ROUNDUP_LOGIN'):
+ if 'ROUNDUP_LOGIN' in os.environ:
l = os.environ['ROUNDUP_LOGIN'].split(':')
name = l[0]
if len(l) > 1:
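
Besides the raise and has_key conversions, the admin.py hunks drop apply(), which no longer exists in Python 3, in favour of direct argument unpacking, and move from list.sort() with a comparison function to sorted(). A small sketch under assumed names (set_props stands in for cl.set in the hunks above; the data is made up):

    import operator

    def set_props(itemid, **props):
        # stand-in for cl.set(itemid, **props)
        return itemid, props

    props = {'status': '2', 'priority': '1'}
    print set_props('42', **props)      # replaces apply(set_props, ('42',), props)

    # sorting callables by name with key= (portable: Python 3 drops the cmp
    # argument to sort()/sorted())
    funcs = sorted([set_props, max, min], key=operator.attrgetter('__name__'))
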
diff --git a/roundup/backends/__init__.py b/roundup/backends/__init__.py
index 87351518219103fa937c9eb15a35c5188b1c4c4b..1fc794888400e87c30a1df4ad50dd61ac872ef0e 100644 (file)
--- a/roundup/backends/__init__.py
+++ b/roundup/backends/__init__.py
'''Get a specific backend by name.'''
vars = globals()
# if requested backend has been imported yet, return current instance
- if vars.has_key(name):
+ if name in vars:
return vars[name]
# import the backend module
module_name = 'back_%s' % name
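
The one-line change above is another instance of the dict.has_key removal: the method is gone in Python 3, while the in operator behaves identically on both versions. For example (names mirror the hunk, values are made up):

    vars = {'back_anydbm': object()}
    name = 'back_anydbm'
    if name in vars:                    # was: vars.has_key(name)
        backend = vars[name]
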
diff --git a/roundup/backends/back_anydbm.py b/roundup/backends/back_anydbm.py
index cfda7577f1f07bd20858c75dcd85551419e3312d..3962a637f3429b4e5dfa485f9b9348cddce23ab4 100644 (file)
--- a/roundup/backends/back_anydbm.py
+++ b/roundup/backends/back_anydbm.py
"""
__docformat__ = 'restructuredtext'
-try:
- import anydbm, sys
- # dumbdbm only works in python 2.1.2+
- if sys.version_info < (2,1,2):
- import dumbdbm
- assert anydbm._defaultmod != dumbdbm
- del dumbdbm
-except AssertionError:
- print "WARNING: you should upgrade to python 2.1.3"
+import os, marshal, re, weakref, string, copy, time, shutil, logging
-import whichdb, os, marshal, re, weakref, string, copy, time, shutil, logging
+from roundup.anypy.dbm_ import anydbm, whichdb
from roundup import hyperdb, date, password, roundupdb, security, support
from roundup.support import reversed
from roundup.backends import locking
from roundup.i18n import _
-from blobfiles import FileStorage
-from sessions_dbm import Sessions, OneTimeKeys
+from roundup.backends.blobfiles import FileStorage
+from roundup.backends.sessions_dbm import Sessions, OneTimeKeys
try:
- from indexer_xapian import Indexer
+ from roundup.backends.indexer_xapian import Indexer
except ImportError:
- from indexer_dbm import Indexer
+ from roundup.backends.indexer_dbm import Indexer
def db_exists(config):
# check for the user db
#
def __getattr__(self, classname):
"""A convenient way of calling self.getclass(classname)."""
- if self.classes.has_key(classname):
+ if classname in self.classes:
return self.classes[classname]
raise AttributeError, classname
def addclass(self, cl):
cn = cl.classname
- if self.classes.has_key(cn):
+ if cn in self.classes:
raise ValueError, cn
self.classes[cn] = cl
def getclasses(self):
"""Return a list of the names of all existing classes."""
- l = self.classes.keys()
- l.sort()
- return l
+ return sorted(self.classes)
def getclass(self, classname):
"""Get the Class object representing a particular class.
try:
return self.classes[classname]
except KeyError:
- raise KeyError, 'There is no class called "%s"'%classname
+ raise KeyError('There is no class called "%s"'%classname)
#
# Class DBs
"""Delete all database contents
"""
logging.getLogger('hyperdb').info('clear')
- for cn in self.classes.keys():
+ for cn in self.classes:
for dummy in 'nodes', 'journals':
path = os.path.join(self.dir, 'journals.%s'%cn)
if os.path.exists(path):
"""
db_type = ''
if os.path.exists(path):
- db_type = whichdb.whichdb(path)
+ db_type = whichdb(path)
if not db_type:
- raise hyperdb.DatabaseError, \
- _("Couldn't identify database type")
+ raise hyperdb.DatabaseError(_("Couldn't identify database type"))
elif os.path.exists(path+'.db'):
# if the path ends in '.db', it's a dbm database, whether
# anydbm says it's dbhash or not!
db_type = self.determine_db_type(path)
# new database? let anydbm pick the best dbm
- if not db_type:
+ # in Python 3+ the "dbm" ("anydbm" to us) module already uses the
+ # whichdb() function to do this
+ if not db_type or hasattr(anydbm, 'whichdb'):
if __debug__:
- logging.getLogger('hyperdb').debug("opendb anydbm.open(%r, 'c')"%path)
+ logging.getLogger('hyperdb').debug(
+ "opendb anydbm.open(%r, 'c')"%path)
return anydbm.open(path, 'c')
- # open the database with the correct module
+    # in Python < 3 anydbm was a little dumb, so manually open the
+ # database with the correct module
try:
dbm = __import__(db_type)
except ImportError:
- raise hyperdb.DatabaseError, \
- _("Couldn't open database - the required module '%s'"\
- " is not available")%db_type
+ raise hyperdb.DatabaseError(_("Couldn't open database - the "
+ "required module '%s' is not available")%db_type)
if __debug__:
- logging.getLogger('hyperdb').debug("opendb %r.open(%r, %r)"%(db_type, path,
- mode))
+ logging.getLogger('hyperdb').debug(
+ "opendb %r.open(%r, %r)"%(db_type, path, mode))
return dbm.open(path, mode)
#
"""
# open the ids DB - create if if doesn't exist
db = self.opendb('_ids', 'c')
- if db.has_key(classname):
+ if classname in db:
newid = db[classname] = str(int(db[classname]) + 1)
else:
# the count() bit is transitional - older dbs won't start at 1
""" add the specified node to its class's db
"""
# we'll be supplied these props if we're doing an import
- if not node.has_key('creator'):
+ if 'creator' not in node:
# add in the "calculated" properties (dupe so we don't affect
# calling code's node assumptions)
node = node.copy()
"""
# try the cache
cache_dict = self.cache.setdefault(classname, {})
- if cache_dict.has_key(nodeid):
+ if nodeid in cache_dict:
if __debug__:
logging.getLogger('hyperdb').debug('get %s%s cached'%(classname, nodeid))
self.stats['cache_hits'] += 1
# get from the database and save in the cache
if db is None:
db = self.getclassdb(classname)
- if not db.has_key(nodeid):
- raise IndexError, "no such %s %s"%(classname, nodeid)
+ if nodeid not in db:
+ raise IndexError("no such %s %s"%(classname, nodeid))
# check the uncommitted, destroyed nodes
- if (self.destroyednodes.has_key(classname) and
- self.destroyednodes[classname].has_key(nodeid)):
- raise IndexError, "no such %s %s"%(classname, nodeid)
+ if (classname in self.destroyednodes and
+ nodeid in self.destroyednodes[classname]):
+ raise IndexError("no such %s %s"%(classname, nodeid))
# decode
res = marshal.loads(db[nodeid])
logging.getLogger('hyperdb').info('destroy %s%s'%(classname, nodeid))
# remove from cache and newnodes if it's there
- if (self.cache.has_key(classname) and
- self.cache[classname].has_key(nodeid)):
+ if (classname in self.cache and nodeid in self.cache[classname]):
del self.cache[classname][nodeid]
- if (self.newnodes.has_key(classname) and
- self.newnodes[classname].has_key(nodeid)):
+ if (classname in self.newnodes and nodeid in self.newnodes[classname]):
del self.newnodes[classname][nodeid]
# see if there's any obvious commit actions that we should get rid of
"""
properties = self.getclass(classname).getprops()
d = {}
- for k, v in node.items():
+ for k, v in node.iteritems():
if k == self.RETIRED_FLAG:
d[k] = v
continue
# if the property doesn't exist then we really don't care
- if not properties.has_key(k):
+ if k not in properties:
continue
# get the property spec
"""
properties = self.getclass(classname).getprops()
d = {}
- for k, v in node.items():
+ for k, v in node.iteritems():
# if the property doesn't exist, or is the "retired" flag then
# it won't be in the properties dict
- if not properties.has_key(k):
+ if k not in properties:
d[k] = v
continue
"""
# try the cache
cache = self.cache.setdefault(classname, {})
- if cache.has_key(nodeid):
+ if nodeid in cache:
return 1
# not in the cache - check the database
if db is None:
db = self.getclassdb(classname)
- res = db.has_key(nodeid)
- return res
+ return nodeid in db
def countnodes(self, classname, db=None):
count = 0
# include the uncommitted nodes
- if self.newnodes.has_key(classname):
+ if classname in self.newnodes:
count += len(self.newnodes[classname])
- if self.destroyednodes.has_key(classname):
+ if classname in self.destroyednodes:
count -= len(self.destroyednodes[classname])
# and count those in the DB
if db is None:
db = self.getclassdb(classname)
- count = count + len(db.keys())
- return count
+ return count + len(db)
#
db = self.opendb('journals.%s'%classname, 'r')
except anydbm.error, error:
if str(error) == "need 'c' or 'n' flag to open new db":
- raise IndexError, 'no such %s %s'%(classname, nodeid)
+ raise IndexError('no such %s %s'%(classname, nodeid))
elif error.args[0] != 2:
# this isn't a "not found" error, be alarmed!
raise
if res:
# we have unsaved journal entries, return them
return res
- raise IndexError, 'no such %s %s'%(classname, nodeid)
+ raise IndexError('no such %s %s'%(classname, nodeid))
try:
journal = marshal.loads(db[nodeid])
except KeyError:
if res:
# we have some unsaved journal entries, be happy!
return res
- raise IndexError, 'no such %s %s'%(classname, nodeid)
+ raise IndexError('no such %s %s'%(classname, nodeid))
db.close()
# add all the saved journal entries for this node
db_type = self.determine_db_type(path)
db = self.opendb(db_name, 'w')
- for key in db.keys():
+ for key in db:
# get the journal for this db entry
journal = marshal.loads(db[key])
l = []
reindex[method(*args)] = 1
finally:
# make sure we close all the database files
- for db in self.databases.values():
+ for db in self.databases.itervalues():
db.close()
del self.databases
self.transactions = []
# reindex the nodes that request it
- for classname, nodeid in filter(None, reindex.keys()):
+ for classname, nodeid in [k for k in reindex if k]:
self.getclass(classname).index(nodeid)
# save the indexer state
"""
# get the database handle
db_name = 'nodes.%s'%classname
- if not self.databases.has_key(db_name):
+ if db_name not in self.databases:
self.databases[db_name] = self.getclassdb(classname, 'c')
return self.databases[db_name]
"""
# get the database handle
db_name = 'journals.%s'%classname
- if not self.databases.has_key(db_name):
+ if db_name not in self.databases:
self.databases[db_name] = self.opendb(db_name, 'c')
return self.databases[db_name]
db = self.getCachedJournalDB(classname)
# now insert the journal entry
- if db.has_key(nodeid):
+ if nodeid in db:
# append to existing
s = db[nodeid]
l = marshal.loads(s)
def doDestroyNode(self, classname, nodeid):
# delete from the class database
db = self.getCachedClassDB(classname)
- if db.has_key(nodeid):
+ if nodeid in db:
del db[nodeid]
# delete from the database
db = self.getCachedJournalDB(classname)
- if db.has_key(nodeid):
+ if nodeid in db:
del db[nodeid]
def rollback(self):
to modify the "creation" or "activity" properties cause a KeyError.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
self.fireAuditors('create', None, propvalues)
newid = self.create_inner(**propvalues)
self.fireReactors('create', newid, None)
def create_inner(self, **propvalues):
""" Called by create, in-between the audit and react calls.
"""
- if propvalues.has_key('id'):
- raise KeyError, '"id" is reserved'
+ if 'id' in propvalues:
+ raise KeyError('"id" is reserved')
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
- if propvalues.has_key('creation') or propvalues.has_key('activity'):
- raise KeyError, '"creation" and "activity" are reserved'
+ if 'creation' in propvalues or 'activity' in propvalues:
+ raise KeyError('"creation" and "activity" are reserved')
# new node's id
newid = self.db.newid(self.classname)
# validate propvalues
num_re = re.compile('^\d+$')
- for key, value in propvalues.items():
+ for key, value in propvalues.iteritems():
if key == self.key:
try:
self.lookup(value)
except KeyError:
pass
else:
- raise ValueError, 'node with key "%s" exists'%value
+ raise ValueError('node with key "%s" exists'%value)
# try to handle this property
try:
prop = self.properties[key]
except KeyError:
- raise KeyError, '"%s" has no property "%s"'%(self.classname,
- key)
+ raise KeyError('"%s" has no property "%s"'%(self.classname,
+ key))
if value is not None and isinstance(prop, hyperdb.Link):
if type(value) != type(''):
- raise ValueError, 'link value must be String'
+ raise ValueError('link value must be String')
link_class = self.properties[key].classname
# if it isn't a number, it's a key
if not num_re.match(value):
try:
value = self.db.classes[link_class].lookup(value)
except (TypeError, KeyError):
- raise IndexError, 'new property "%s": %s not a %s'%(
- key, value, link_class)
+ raise IndexError('new property "%s": %s not a %s'%(
+ key, value, link_class))
elif not self.db.getclass(link_class).hasnode(value):
- raise IndexError, '%s has no node %s'%(link_class, value)
+ raise IndexError('%s has no node %s'%(link_class,
+ value))
# save off the value
propvalues[key] = value
if value is None:
value = []
if not hasattr(value, '__iter__'):
- raise TypeError, 'new property "%s" not an iterable of ids'%key
+ raise TypeError('new property "%s" not an iterable of ids'%key)
# clean up and validate the list of links
link_class = self.properties[key].classname
l = []
for entry in value:
if type(entry) != type(''):
- raise ValueError, '"%s" multilink value (%r) '\
- 'must contain Strings'%(key, value)
+ raise ValueError('"%s" multilink value (%r) '\
+ 'must contain Strings'%(key, value))
# if it isn't a number, it's a key
if not num_re.match(entry):
try:
entry = self.db.classes[link_class].lookup(entry)
except (TypeError, KeyError):
- raise IndexError, 'new property "%s": %s not a %s'%(
- key, entry, self.properties[key].classname)
+ raise IndexError('new property "%s": %s not a %s'%(
+ key, entry, self.properties[key].classname))
l.append(entry)
value = l
propvalues[key] = value
# handle additions
for nodeid in value:
if not self.db.getclass(link_class).hasnode(nodeid):
- raise IndexError, '%s has no node %s'%(link_class,
- nodeid)
+ raise IndexError('%s has no node %s'%(link_class,
+ nodeid))
# register the link with the newly linked node
if self.do_journal and self.properties[key].do_journal:
self.db.addjournal(link_class, nodeid, 'link',
elif isinstance(prop, hyperdb.String):
if type(value) != type('') and type(value) != type(u''):
- raise TypeError, 'new property "%s" not a string'%key
+ raise TypeError('new property "%s" not a string'%key)
if prop.indexme:
self.db.indexer.add_text((self.classname, newid, key),
value)
elif isinstance(prop, hyperdb.Password):
if not isinstance(value, password.Password):
- raise TypeError, 'new property "%s" not a Password'%key
+ raise TypeError('new property "%s" not a Password'%key)
elif isinstance(prop, hyperdb.Date):
if value is not None and not isinstance(value, date.Date):
- raise TypeError, 'new property "%s" not a Date'%key
+ raise TypeError('new property "%s" not a Date'%key)
elif isinstance(prop, hyperdb.Interval):
if value is not None and not isinstance(value, date.Interval):
- raise TypeError, 'new property "%s" not an Interval'%key
+ raise TypeError('new property "%s" not an Interval'%key)
elif value is not None and isinstance(prop, hyperdb.Number):
try:
float(value)
except ValueError:
- raise TypeError, 'new property "%s" not numeric'%key
+ raise TypeError('new property "%s" not numeric'%key)
elif value is not None and isinstance(prop, hyperdb.Boolean):
try:
int(value)
except ValueError:
- raise TypeError, 'new property "%s" not boolean'%key
+ raise TypeError('new property "%s" not boolean'%key)
# make sure there's data where there needs to be
- for key, prop in self.properties.items():
- if propvalues.has_key(key):
+ for key, prop in self.properties.iteritems():
+ if key in propvalues:
continue
if key == self.key:
- raise ValueError, 'key property "%s" is required'%key
+ raise ValueError('key property "%s" is required'%key)
if isinstance(prop, hyperdb.Multilink):
propvalues[key] = []
# check for one of the special props
if propname == 'creation':
- if d.has_key('creation'):
+ if 'creation' in d:
return d['creation']
if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
+ raise ValueError('Journalling is disabled for this class')
journal = self.db.getjournal(self.classname, nodeid)
if journal:
return journal[0][1]
# on the strange chance that there's no journal
return date.Date()
if propname == 'activity':
- if d.has_key('activity'):
+ if 'activity' in d:
return d['activity']
if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
+ raise ValueError('Journalling is disabled for this class')
journal = self.db.getjournal(self.classname, nodeid)
if journal:
return self.db.getjournal(self.classname, nodeid)[-1][1]
# on the strange chance that there's no journal
return date.Date()
if propname == 'creator':
- if d.has_key('creator'):
+ if 'creator' in d:
return d['creator']
if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
+ raise ValueError('Journalling is disabled for this class')
journal = self.db.getjournal(self.classname, nodeid)
if journal:
num_re = re.compile('^\d+$')
else:
return self.db.getuid()
if propname == 'actor':
- if d.has_key('actor'):
+ if 'actor' in d:
return d['actor']
if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
+ raise ValueError('Journalling is disabled for this class')
journal = self.db.getjournal(self.classname, nodeid)
if journal:
num_re = re.compile('^\d+$')
# get the property (raises KeyErorr if invalid)
prop = self.properties[propname]
- if not d.has_key(propname):
+ if propname not in d:
if default is _marker:
if isinstance(prop, hyperdb.Multilink):
return []
to modify the "creation" or "activity" properties cause a KeyError.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
self.fireAuditors('set', nodeid, propvalues)
oldvalues = copy.deepcopy(self.db.getnode(self.classname, nodeid))
- for name,prop in self.getprops(protected=0).items():
- if oldvalues.has_key(name):
+ for name, prop in self.getprops(protected=0).iteritems():
+ if name in oldvalues:
continue
if isinstance(prop, hyperdb.Multilink):
oldvalues[name] = []
if not propvalues:
return propvalues
- if propvalues.has_key('creation') or propvalues.has_key('activity'):
+ if 'creation' in propvalues or 'activity' in propvalues:
raise KeyError, '"creation" and "activity" are reserved'
- if propvalues.has_key('id'):
+ if 'id' in propvalues:
raise KeyError, '"id" is reserved'
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
node = self.db.getnode(self.classname, nodeid)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
raise IndexError
num_re = re.compile('^\d+$')
# if the journal value is to be different, store it in here
journalvalues = {}
- for propname, value in propvalues.items():
+ # list() propvalues 'cos it might be modified by the loop
+ for propname, value in list(propvalues.items()):
# check to make sure we're not duplicating an existing key
if propname == self.key and node[propname] != value:
try:
except KeyError:
pass
else:
- raise ValueError, 'node with key "%s" exists'%value
+ raise ValueError('node with key "%s" exists'%value)
# this will raise the KeyError if the property isn't valid
# ... we don't use getprops() here because we only care about
try:
prop = self.properties[propname]
except KeyError:
- raise KeyError, '"%s" has no property named "%s"'%(
- self.classname, propname)
+ raise KeyError('"%s" has no property named "%s"'%(
+ self.classname, propname))
# if the value's the same as the existing value, no sense in
# doing anything
link_class = prop.classname
# if it isn't a number, it's a key
if value is not None and not isinstance(value, type('')):
- raise ValueError, 'property "%s" link value be a string'%(
- propname)
+ raise ValueError('property "%s" link value be a string'%(
+ propname))
if isinstance(value, type('')) and not num_re.match(value):
try:
value = self.db.classes[link_class].lookup(value)
except (TypeError, KeyError):
- raise IndexError, 'new property "%s": %s not a %s'%(
- propname, value, prop.classname)
+ raise IndexError('new property "%s": %s not a %s'%(
+ propname, value, prop.classname))
if (value is not None and
not self.db.getclass(link_class).hasnode(value)):
- raise IndexError, '%s has no node %s'%(link_class, value)
+ raise IndexError('%s has no node %s'%(link_class,
+ value))
if self.do_journal and prop.do_journal:
# register the unlink with the old linked node
- if node.has_key(propname) and node[propname] is not None:
+ if propname in node and node[propname] is not None:
self.db.addjournal(link_class, node[propname], 'unlink',
(self.classname, nodeid, propname))
if value is None:
value = []
if not hasattr(value, '__iter__'):
- raise TypeError, 'new property "%s" not an iterable of'\
- ' ids'%propname
+ raise TypeError('new property "%s" not an iterable of'
+ ' ids'%propname)
link_class = self.properties[propname].classname
l = []
for entry in value:
# if it isn't a number, it's a key
if type(entry) != type(''):
- raise ValueError, 'new property "%s" link value ' \
- 'must be a string'%propname
+ raise ValueError('new property "%s" link value '
+ 'must be a string'%propname)
if not num_re.match(entry):
try:
entry = self.db.classes[link_class].lookup(entry)
except (TypeError, KeyError):
- raise IndexError, 'new property "%s": %s not a %s'%(
+ raise IndexError('new property "%s": %s not a %s'%(
propname, entry,
- self.properties[propname].classname)
+ self.properties[propname].classname))
l.append(entry)
value = l
propvalues[propname] = value
remove = []
# handle removals
- if node.has_key(propname):
+ if propname in node:
l = node[propname]
else:
l = []
# handle additions
for id in value:
if not self.db.getclass(link_class).hasnode(id):
- raise IndexError, '%s has no node %s'%(link_class, id)
+ raise IndexError('%s has no node %s'%(link_class,
+ id))
if id in l:
continue
# register the link with the newly linked node
elif isinstance(prop, hyperdb.String):
if value is not None and type(value) != type('') and type(value) != type(u''):
- raise TypeError, 'new property "%s" not a string'%propname
+ raise TypeError('new property "%s" not a '
+ 'string'%propname)
if prop.indexme:
self.db.indexer.add_text((self.classname, nodeid, propname),
value)
elif isinstance(prop, hyperdb.Password):
if not isinstance(value, password.Password):
- raise TypeError, 'new property "%s" not a Password'%propname
+ raise TypeError('new property "%s" not a '
+ 'Password'%propname)
propvalues[propname] = value
elif value is not None and isinstance(prop, hyperdb.Date):
if not isinstance(value, date.Date):
- raise TypeError, 'new property "%s" not a Date'% propname
+ raise TypeError('new property "%s" not a '
+ 'Date'%propname)
propvalues[propname] = value
elif value is not None and isinstance(prop, hyperdb.Interval):
if not isinstance(value, date.Interval):
- raise TypeError, 'new property "%s" not an '\
- 'Interval'%propname
+ raise TypeError('new property "%s" not an '
+ 'Interval'%propname)
propvalues[propname] = value
elif value is not None and isinstance(prop, hyperdb.Number):
try:
float(value)
except ValueError:
- raise TypeError, 'new property "%s" not numeric'%propname
+ raise TypeError('new property "%s" not '
+ 'numeric'%propname)
elif value is not None and isinstance(prop, hyperdb.Boolean):
try:
int(value)
except ValueError:
- raise TypeError, 'new property "%s" not boolean'%propname
+ raise TypeError('new property "%s" not '
+ 'boolean'%propname)
node[propname] = value
to modify the "creation" or "activity" properties cause a KeyError.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
self.fireAuditors('retire', nodeid, None)
Make node available for all operations like it was before retirement.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
node = self.db.getnode(self.classname, nodeid)
# check if key property was overrided
except KeyError:
pass
else:
- raise KeyError, "Key property (%s) of retired node clashes with \
- existing one (%s)" % (key, node[key])
+ raise KeyError("Key property (%s) of retired node clashes "
+ "with existing one (%s)" % (key, node[key]))
# Now we can safely restore node
self.fireAuditors('restore', nodeid, None)
del node[self.db.RETIRED_FLAG]
"""Return true if the node is retired.
"""
node = self.db.getnode(self.classname, nodeid, cldb)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
return 1
return 0
support the session storage of the cgi interface.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
self.db.destroynode(self.classname, nodeid)
def history(self, nodeid):
'tag' is the journaltag specified when the database was opened.
"""
if not self.do_journal:
- raise ValueError, 'Journalling is disabled for this class'
+ raise ValueError('Journalling is disabled for this class')
return self.db.getjournal(self.classname, nodeid)
# Locating nodes:
"""
prop = self.getprops()[propname]
if not isinstance(prop, hyperdb.String):
- raise TypeError, 'key properties must be String'
+ raise TypeError('key properties must be String')
self.key = propname
def getkey(self):
otherwise a KeyError is raised.
"""
if not self.key:
- raise TypeError, 'No key property set for class %s'%self.classname
+ raise TypeError('No key property set for '
+ 'class %s'%self.classname)
cldb = self.db.getclassdb(self.classname)
try:
for nodeid in self.getnodeids(cldb):
node = self.db.getnode(self.classname, nodeid, cldb)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
continue
- if not node.has_key(self.key):
+ if self.key not in node:
continue
if node[self.key] == keyvalue:
return nodeid
finally:
cldb.close()
- raise KeyError, 'No key (%s) value "%s" for "%s"'%(self.key,
- keyvalue, self.classname)
+ raise KeyError('No key (%s) value "%s" for "%s"'%(self.key,
+ keyvalue, self.classname))
# change from spec - allows multiple props to match
def find(self, **propspec):
db.issue.find(messages='1')
db.issue.find(messages={'1':1,'3':1}, files={'7':1})
"""
- propspec = propspec.items()
- for propname, itemids in propspec:
+ for propname, itemids in propspec.iteritems():
# check the prop is OK
prop = self.properties[propname]
if not isinstance(prop, hyperdb.Link) and not isinstance(prop, hyperdb.Multilink):
- raise TypeError, "'%s' not a Link/Multilink property"%propname
+ raise TypeError("'%s' not a Link/Multilink "
+ "property"%propname)
# ok, now do the find
cldb = self.db.getclassdb(self.classname)
try:
for id in self.getnodeids(db=cldb):
item = self.db.getnode(self.classname, id, db=cldb)
- if item.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in item:
continue
- for propname, itemids in propspec:
+ for propname, itemids in propspec.iteritems():
if type(itemids) is not type({}):
itemids = {itemids:1}
# special case if the item doesn't have this property
- if not item.has_key(propname):
- if itemids.has_key(None):
+ if propname not in item:
+ if None in itemids:
l.append(id)
break
continue
# grab the property definition and its value on this item
prop = self.properties[propname]
value = item[propname]
- if isinstance(prop, hyperdb.Link) and itemids.has_key(value):
+ if isinstance(prop, hyperdb.Link) and value in itemids:
l.append(id)
break
elif isinstance(prop, hyperdb.Multilink):
hit = 0
for v in value:
- if itemids.has_key(v):
+ if v in itemids:
l.append(id)
hit = 1
break
The return is a list of the id of all nodes that match.
"""
- for propname in requirements.keys():
+ for propname in requirements:
prop = self.properties[propname]
if not isinstance(prop, hyperdb.String):
- raise TypeError, "'%s' not a String property"%propname
+ raise TypeError("'%s' not a String property"%propname)
requirements[propname] = requirements[propname].lower()
l = []
cldb = self.db.getclassdb(self.classname)
try:
for nodeid in self.getnodeids(cldb):
node = self.db.getnode(self.classname, nodeid, cldb)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
continue
- for key, value in requirements.items():
- if not node.has_key(key):
+ for key, value in requirements.iteritems():
+ if key not in node:
break
if node[key] is None or node[key].lower() != value:
break
try:
for nodeid in self.getnodeids(cldb):
node = self.db.getnode(cn, nodeid, cldb)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
continue
l.append(nodeid)
finally:
res = []
# start off with the new nodes
- if self.db.newnodes.has_key(self.classname):
- res += self.db.newnodes[self.classname].keys()
+ if self.classname in self.db.newnodes:
+ res.extend(self.db.newnodes[self.classname])
must_close = False
if db is None:
db = self.db.getclassdb(self.classname)
must_close = True
try:
- res = res + db.keys()
+ res.extend(db)
# remove the uncommitted, destroyed nodes
- if self.db.destroyednodes.has_key(self.classname):
- for nodeid in self.db.destroyednodes[self.classname].keys():
- if db.has_key(nodeid):
+ if self.classname in self.db.destroyednodes:
+ for nodeid in self.db.destroyednodes[self.classname]:
+ if nodeid in db:
res.remove(nodeid)
# check retired flag
l = []
for nodeid in res:
node = self.db.getnode(self.classname, nodeid, db)
- is_ret = node.has_key(self.db.RETIRED_FLAG)
+ is_ret = self.db.RETIRED_FLAG in node
if retired == is_ret:
l.append(nodeid)
res = l
INTERVAL = 'spec:interval'
OTHER = 'spec:other'
- for k, v in filterspec.items():
+ for k, v in filterspec.iteritems():
propclass = props[k]
if isinstance(propclass, hyperdb.Link):
if type(v) is not type([]):
# TODO: only full-scan once (use items())
for nodeid in self.getnodeids(cldb):
node = self.db.getnode(cn, nodeid, cldb)
- if node.has_key(self.db.RETIRED_FLAG):
+ if self.db.RETIRED_FLAG in node:
continue
# apply filter
for t, k, v in filterspec:
try:
v = item[prop]
except KeyError:
- if JPROPS.has_key(prop):
+ if prop in JPROPS:
# force lookup of the special journal prop
v = self.get(itemid, prop)
else:
key = link.orderprop()
child = pt.propdict[key]
if key!='id':
- if not lcache.has_key(v):
+ if v not in lcache:
# open the link class db if it's not already
if lcldb is None:
lcldb = self.db.getclassdb(lcn)
may collide with the names of existing properties, or a ValueError
is raised before any properties have been added.
"""
- for key in properties.keys():
- if self.properties.has_key(key):
- raise ValueError, key
+ for key in properties:
+ if key in self.properties:
+ raise ValueError(key)
self.properties.update(properties)
def index(self, nodeid):
""" Add (or refresh) the node to search indexes """
# find all the String properties that have indexme
- for prop, propclass in self.getprops().items():
+ for prop, propclass in self.getprops().iteritems():
if isinstance(propclass, hyperdb.String) and propclass.indexme:
# index them under (classname, nodeid, property)
try:
Return the nodeid of the node imported.
"""
if self.db.journaltag is None:
- raise hyperdb.DatabaseError, _('Database open read-only')
+ raise hyperdb.DatabaseError(_('Database open read-only'))
properties = self.getprops()
# make the new node's property map
date = date.get_tuple()
if action == 'set':
export_data = {}
- for propname, value in params.items():
- if not properties.has_key(propname):
+ for propname, value in params.iteritems():
+ if propname not in properties:
# property no longer in the schema
continue
value = str(value)
export_data[propname] = value
params = export_data
- l = [nodeid, date, user, action, params]
- r.append(map(repr, l))
+ r.append([repr(nodeid), repr(date), repr(user),
+ repr(action), repr(params)])
return r
def import_journals(self, entries):
properties = self.getprops()
d = {}
for l in entries:
- l = map(eval, l)
- nodeid, jdate, user, action, params = l
+ nodeid, jdate, user, action, params = map(eval, l)
r = d.setdefault(nodeid, [])
if action == 'set':
- for propname, value in params.items():
+ for propname, value in params.iteritems():
prop = properties[propname]
if value is None:
pass
params[propname] = value
r.append((nodeid, date.Date(jdate), user, action, params))
- for nodeid, l in d.items():
+ for nodeid, l in d.iteritems():
self.db.setjournal(self.classname, nodeid, l)
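For context, the export/import pair above round-trips each journal entry as a list of repr() strings that import_journals() later reverses with eval(). A rough sketch of that round trip with made-up sample values (illustrative only):

    # export side: every field is serialised with repr()
    entry = ['23', (2010, 2, 22, 5, 26, 57), '1', 'set', {'status': '2'}]
    row = [repr(f) for f in entry]

    # import side: eval() turns each string back into the original value
    nodeid, jdate, user, action, params = [eval(f) for f in row]
    assert params == {'status': '2'}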
class FileClass(hyperdb.FileClass, Class):
"""The newly-created class automatically includes the "content"
and "type" properties.
"""
- if not properties.has_key('content'):
+ if 'content' not in properties:
properties['content'] = hyperdb.String(indexme='yes')
- if not properties.has_key('type'):
+ if 'type' not in properties:
properties['type'] = hyperdb.String()
Class.__init__(self, db, classname, **properties)
if propname == 'content':
try:
return self.db.getfile(self.classname, nodeid, None)
- except IOError, (strerror):
+ except IOError, strerror:
# XXX by catching this we don't see an error in the log.
return 'ERROR reading file: %s%s\n%s\n%s'%(
self.classname, nodeid, poss_msg, strerror)
# create the oldvalues dict - fill in any missing values
oldvalues = copy.deepcopy(self.db.getnode(self.classname, itemid))
- for name,prop in self.getprops(protected=0).items():
- if oldvalues.has_key(name):
+ for name, prop in self.getprops(protected=0).iteritems():
+ if name in oldvalues:
continue
if isinstance(prop, hyperdb.Multilink):
oldvalues[name] = []
# now remove the content property so it's not stored in the db
content = None
- if propvalues.has_key('content'):
+ if 'content' in propvalues:
content = propvalues['content']
del propvalues['content']
Use the content-type property for the content property.
"""
# find all the String properties that have indexme
- for prop, propclass in self.getprops().items():
+ for prop, propclass in self.getprops().iteritems():
if prop == 'content' and propclass.indexme:
mime_type = self.get(nodeid, 'type', self.default_mime_type)
self.db.indexer.add_text((self.classname, nodeid, 'content'),
dictionary attempts to specify any of these properties or a
"creation" or "activity" property, a ValueError is raised.
"""
- if not properties.has_key('title'):
+ if 'title' not in properties:
properties['title'] = hyperdb.String(indexme='yes')
- if not properties.has_key('messages'):
+ if 'messages' not in properties:
properties['messages'] = hyperdb.Multilink("msg")
- if not properties.has_key('files'):
+ if 'files' not in properties:
properties['files'] = hyperdb.Multilink("file")
- if not properties.has_key('nosy'):
+ if 'nosy' not in properties:
# note: journalling is turned off as it really just wastes
# space. this behaviour may be overridden in an instance
properties['nosy'] = hyperdb.Multilink("user", do_journal="no")
- if not properties.has_key('superseder'):
+ if 'superseder' not in properties:
properties['superseder'] = hyperdb.Multilink(classname)
Class.__init__(self, db, classname, **properties)
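The IssueClass constructor above fills in any standard property the schema left out before delegating to Class.__init__. The same effect could be written with dict.setdefault; the helper below is only an equivalent sketch (apply_issue_defaults is a hypothetical name, not code from the patch):

    from roundup import hyperdb

    def apply_issue_defaults(properties, classname):
        # mirror the chain of "if 'x' not in properties" checks above
        defaults = {
            'title': hyperdb.String(indexme='yes'),
            'messages': hyperdb.Multilink('msg'),
            'files': hyperdb.Multilink('file'),
            'nosy': hyperdb.Multilink('user', do_journal='no'),
            'superseder': hyperdb.Multilink(classname),
        }
        for name, prop in defaults.items():
            properties.setdefault(name, prop)
        return properties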
index 9e9d278ecf38829b3b5581639800672b29aed0f5..e066083eaecc2bb2db9f819c0b3ea75a92373f76 100644 (file)
def connection_dict(config, dbnamestr=None):
''' read_default_group is MySQL-specific, ignore it '''
d = rdbms_common.connection_dict(config, dbnamestr)
- if d.has_key('read_default_group'):
+ if 'read_default_group' in d:
del d['read_default_group']
- if d.has_key('read_default_file'):
+ if 'read_default_file' in d:
del d['read_default_file']
return d
try:
conn = psycopg.connect(**template1)
except psycopg.OperationalError, message:
- raise hyperdb.DatabaseError, message
+ raise hyperdb.DatabaseError(message)
conn.set_isolation_level(0)
cursor = conn.cursor()
return
finally:
conn.close()
- raise RuntimeError, '10 attempts to create database failed'
+ raise RuntimeError('10 attempts to create database failed')
def pg_command(cursor, command):
'''Execute the postgresql command, which may be blocked by some other
except psycopg.ProgrammingError, err:
response = str(err).split('\n')[0]
if response.find('FATAL') != -1:
- raise RuntimeError, response
+ raise RuntimeError(response)
else:
msgs = [
'is being accessed by other users',
if can_retry:
time.sleep(1)
return 0
- raise RuntimeError, response
+ raise RuntimeError(response)
return 1
def db_exists(config):
try:
conn = psycopg.connect(**db)
except psycopg.OperationalError, message:
- raise hyperdb.DatabaseError, message
+ raise hyperdb.DatabaseError(message)
cursor = conn.cursor()
def add_actor_column(self):
# update existing tables to have the new actor column
tables = self.database_schema['tables']
- for name in tables.keys():
+ for name in tables:
self.sql('ALTER TABLE _%s add __actor VARCHAR(255)'%name)
def __repr__(self):
rdbms_common.Database.clear(self)
# reset the sequences
- for cn in self.classes.keys():
+ for cn in self.classes:
self.cursor.execute('DROP SEQUENCE _%s_ids'%cn)
self.cursor.execute('CREATE SEQUENCE _%s_ids'%cn)
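pg_command() above returns 1 once the statement has been executed and 0 (after a short sleep) while the database is still being accessed by other users, so its caller keeps retrying before giving up with the RuntimeError seen in the hunk. A minimal sketch of that calling pattern (run_with_retries is a hypothetical wrapper, not code from the module):

    def run_with_retries(cursor, command, attempts=10):
        # pg_command() is the helper shown in the hunk above
        for _ in range(attempts):
            if pg_command(cursor, command):
                return
        raise RuntimeError('10 attempts to create database failed')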
index 245240f878142323f84a37c9b297581ff4714c9d..c947eb3392d74b1e930cbee525bcc7a0ed3e3dfa 100644 (file)
# update existing tables to have the new actor column
tables = self.database_schema['tables']
for classname, spec in self.classes.items():
- if tables.has_key(classname):
+ if classname in tables:
dbspec = tables[classname]
self.update_class(spec, dbspec, force=1, adding_v2=1)
# we've updated - don't try again
SQLite doesn't have ALTER TABLE, so we have to copy and
regenerate the tables with the new schema.
"""
- new_has = spec.properties.has_key
new_spec = spec.schema()
new_spec[1].sort()
old_spec[1].sort()
old_has = {}
for name, prop in old_spec[1]:
old_has[name] = 1
- if new_has(name) or not isinstance(prop, hyperdb.Multilink):
+ if name in spec.properties or not isinstance(prop, hyperdb.Multilink):
continue
# it's a multilink, and it's been removed - drop the old
# table. First drop indexes.
self.drop_multilink_table_indexes(spec.classname, name)
sql = 'drop table %s_%s'%(spec.classname, prop)
self.sql(sql)
- old_has = old_has.has_key
# now figure how we populate the new table
if adding_v2:
for propname,x in new_spec[1]:
prop = properties[propname]
if isinstance(prop, hyperdb.Multilink):
- if not old_has(propname):
+ if propname not in old_has:
# we need to create the new table
self.create_multilink_table(spec, propname)
elif force:
(%s, %s)"""%(tn, self.arg, self.arg)
for linkid, nodeid in rows:
self.sql(sql, (int(linkid), int(nodeid)))
- elif old_has(propname):
+ elif propname in old_has:
# we copy this col over from the old table
fetch.append('_'+propname)
elif isinstance(prop, hyperdb.Interval):
inscols.append('_'+propname)
inscols.append('__'+propname+'_int__')
- elif old_has(propname):
+ elif propname in old_has:
# we copy this col over from the old table
inscols.append('_'+propname)
v = hyperdb.Interval(entry[name]).as_seconds()
except IndexError:
v = None
- elif entry.has_key(name):
+ elif name in entry:
v = hyperdb.Interval(entry[name]).as_seconds()
else:
v = None
v = entry[name]
except IndexError:
v = None
- elif (sqlite_version == 1 and entry.has_key(name)):
+ elif (sqlite_version == 1 and name in entry):
v = entry[name]
else:
v = None
""" If there's NO matches to a fetch, sqlite returns NULL
instead of nothing
"""
- return filter(None, rdbms_common.Class.filter(self, search_matches,
- filterspec, sort=sort, group=group))
+ return [f for f in rdbms_common.Class.filter(self, search_matches,
+ filterspec, sort=sort, group=group) if f]
class Class(sqliteClass, rdbms_common.Class):
pass
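The sqlite filter() override above replaces filter(None, seq) with a list comprehension; both drop the false-ish entries (the NULL rows sqlite can hand back when a fetch has no matches). A quick illustration under Python 2 semantics:

    rows = ['1', None, '3', None]
    assert filter(None, rows) == ['1', '3']        # Python 2: filter() returns a list
    assert [r for r in rows if r] == ['1', '3']    # same result, and still a list after 2to3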
index e5429d33b2fb1e9dfe7ce273eb07085195dbad25..93431277060fc331a74a7b127d92f11a3574c20f 100644 (file)
return {}
designator_propname = {}
- for nm, propclass in klass.getprops().items():
+ for nm, propclass in klass.getprops().iteritems():
if _isLink(propclass):
designator_propname.setdefault(propclass.classname,
[]).append(nm)
# and files
nodeids = {} # this is the answer
propspec = {} # used to do the klass.find
- for l in designator_propname.values():
+ for l in designator_propname.itervalues():
for propname in l:
propspec[propname] = {} # used as a set (value doesn't matter)
# skip this result if we don't care about this class/property
classname = entry[0]
property = entry[2]
- if ignore.has_key((classname, property)):
+ if (classname, property) in ignore:
continue
# if it's a property on klass, it's easy
# backends as that can cause problems down the track)
nodeid = str(entry[1])
if classname == klass.classname:
- if not nodeids.has_key(nodeid):
+ if nodeid not in nodeids:
nodeids[nodeid] = {}
continue
# make sure the class is a linked one, otherwise ignore
- if not designator_propname.has_key(classname):
+ if classname not in designator_propname:
continue
# it's a linked class - set up to do the klass.find
propspec[linkprop][nodeid] = 1
# retain only the meaningful entries
- for propname, idset in propspec.items():
+ # iterate over a snapshot of the items: empty entries are deleted below
+ for propname, idset in list(propspec.items()):
if not idset:
del propspec[propname]
nodeids[resid] = {}
node_dict = nodeids[resid]
# now figure out where it came from
- for linkprop in propspec.keys():
+ for linkprop in propspec:
v = klass.get(resid, linkprop)
# the link might be a Link so deal with a single result or None
if isinstance(propdefs[linkprop], hyperdb.Link):
if v is None: continue
v = [v]
for nodeid in v:
- if propspec[linkprop].has_key(nodeid):
+ if nodeid in propspec[linkprop]:
# OK, this node[propname] has a winner
- if not node_dict.has_key(linkprop):
+ if linkprop not in node_dict:
node_dict[linkprop] = [nodeid]
else:
node_dict[linkprop].append(nodeid)
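Note that some of the loops touched here delete dictionary entries while they iterate, for example the empty propspec entries above and the non-matching hits in the indexer further down. Iterating the live dict with iteritems() (or plain in-iteration) raises RuntimeError as soon as a deletion happens, so those spots walk a snapshot instead. A small demonstration:

    counts = {'a': 1, 'b': 0, 'c': 2}

    # deleting while iterating counts.iteritems() would raise RuntimeError,
    # so take a snapshot of the items first
    for k, v in list(counts.items()):
        if not v:
            del counts[k]
    assert counts == {'a': 1, 'c': 2}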
index 5b166b52e88e616e7e61a8e8907b054c85f77a54..0b8562c6b7353bc8a2bff145cc97aae44d6994f8 100644 (file)
self.load_index()
# remove old entries for this identifier
- if self.files.has_key(identifier):
+ if identifier in self.files:
self.purge_entry(identifier)
# split into words
for word in words:
if self.is_stopword(word):
continue
- if filedict.has_key(word):
+ if word in filedict:
filedict[word] = filedict[word]+1
else:
filedict[word] = 1
# now add to the totals
- for word in filedict.keys():
+ for word in filedict:
# each word has a dict of {identifier: count}
- if self.words.has_key(word):
+ if word in self.words:
entry = self.words[word]
else:
# new word
return {}
if hits is None:
hits = {}
- for k in entry.keys():
- if not self.fileids.has_key(k):
- raise ValueError, 'Index is corrupted: re-generate it'
+ for k in entry:
+ if k not in self.fileids:
+ raise ValueError('Index is corrupted: re-generate it')
hits[k] = self.fileids[k]
else:
# Eliminate hits for every non-match
- for fileid in hits.keys():
- if not entry.has_key(fileid):
+ # iterate over a snapshot of the keys: non-matching hits are deleted below
+ for fileid in list(hits):
+ if fileid not in entry:
del hits[fileid]
if hits is None:
return {}
- return hits.values()
+ return list(hits.values())
segments = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#_-!"
def load_index(self, reload=0, wordlist=None):
dbslice = marshal.loads(pickle_str)
if dbslice.get('WORDS'):
# if it has some words, add them
- for word, entry in dbslice['WORDS'].items():
+ for word, entry in dbslice['WORDS'].iteritems():
db['WORDS'][word] = entry
if dbslice.get('FILES'):
# if it has some files, add them
segdicts = {} # Need batch of empty dicts
for segment in letters:
segdicts[segment] = {}
- for word, entry in self.words.items(): # Split into segment dicts
+ for word, entry in self.words.iteritems(): # Split into segment dicts
initchar = word[0].upper()
segdicts[initchar][word] = entry
'''
self.load_index()
- if not self.files.has_key(identifier):
+ if identifier not in self.files:
return
file_index = self.files[identifier][0]
del self.fileids[file_index]
# The much harder part, cleanup the word index
- for key, occurs in self.words.items():
- if occurs.has_key(file_index):
+ for key, occurs in self.words.iteritems():
+ if file_index in occurs:
del occurs[file_index]
# save needed
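The dbm indexer hunks above all manipulate the same two structures: self.words, a mapping of word -> {file_index: count}, and self.fileids/self.files, which map numeric file indexes to (classname, nodeid, property) identifiers and back. A rough, simplified sketch of how a lookup intersects the per-word entries (not the module's actual API):

    words = {
        'database': {1: 2, 2: 1},    # word -> {file_index: occurrence count}
        'lock':     {2: 3},
    }
    fileids = {1: ('issue', '12', 'content'), 2: ('file', '7', 'content')}

    def find(search_words):
        hits = None
        for word in search_words:
            entry = words.get(word, {})
            if hits is None:
                hits = dict((k, fileids[k]) for k in entry)
            else:
                # keep only files that also contain this word
                for fileid in list(hits):
                    if fileid not in entry:
                        del hits[fileid]
        return list(hits.values()) if hits else []

    print(find(['database', 'lock']))    # -> [('file', '7', 'content')]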
index 70e522b915823b11be46e5cfd3cca568791e92de..a5886ff780d11ea395cb7984cb35332e4fdbf0e0 100644 (file)
sql = sql%(' '.join(join_list), self.db.arg, ' '.join(match_list))
self.db.cursor.execute(sql, l)
- r = map(lambda x: x[0], self.db.cursor.fetchall())
+ r = [x[0] for x in self.db.cursor.fetchall()]
if not r:
return []
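The rdbms indexer hunk above rewrites a map() over a lambda as a list comprehension; both pull the first column out of every row returned by fetchall(). For illustration:

    rows = [('12',), ('15',), ('7',)]                        # shape of cursor.fetchall() here
    assert map(lambda x: x[0], rows) == ['12', '15', '7']    # Python 2: map() returns a list
    assert [x[0] for x in rows] == ['12', '15', '7']         # equivalent, a list under Python 3 too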
index 4fe955f2e169031ec7cb439f3bd70cea36af6169..5702609f2a41666d80f5bccb1ec10080c794ce9e 100644 (file)
'''
__docformat__ = 'restructuredtext'
-import portalocker
+from roundup.backends import portalocker
def acquire_lock(path, block=1):
'''Acquire a lock for the given path
'''
- import portalocker
file = open(path, 'w')
if block:
portalocker.lock(file, portalocker.LOCK_EX)
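acquire_lock() now imports portalocker from roundup.backends rather than relying on a bare module-level import inside the function. A minimal usage sketch, assuming the module's lock()/unlock() helpers and the LOCK_EX constant behave as the hunks suggest:

    from roundup.backends import portalocker

    f = open('/tmp/roundup-demo.lock', 'w')
    portalocker.lock(f, portalocker.LOCK_EX)   # block until an exclusive lock is held
    try:
        f.write('critical section\n')
    finally:
        portalocker.unlock(f)                  # unlock() assumed to exist alongside lock()
        f.close()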
index 30957c0b14ef0c07aa1e07cb06ae47a2b0a134b4..18b2748450f295f6d39e42c49a8d28d1be8023a4 100644 (file)
if __name__ == '__main__':
from time import time, strftime, localtime
import sys
- import portalocker
log = open('log.txt', "a+")
- portalocker.lock(log, portalocker.LOCK_EX)
+ lock(log, LOCK_EX)
timestamp = strftime("%m/%d/%Y %H:%M:%S\n", localtime(time()))
log.write( timestamp )