author    richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Mon, 23 Sep 2002 00:50:32 +0000 (00:50 +0000)
committer richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Mon, 23 Sep 2002 00:50:32 +0000 (00:50 +0000)
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@1205 57a73879-2fb5-44c3-a270-3262357dd7e2
diff --git a/CHANGES.txt b/CHANGES.txt
index a6db56a057557dc98b614da4f080aa08f7bbf0df..0341e96bc1a40b0ca89c4cf6fa273b9c522a3fc6 100644 (file)
--- a/CHANGES.txt
+++ b/CHANGES.txt
- [ sf 611217 ] menu() has problems when labelprop==None
- verify contents of tracker module when the tracker is opened
- performance improvements in *dbm and sql backends
+- mailgw was missing an "import sys"
2002-09-13 0.5.0 beta2
. all backends now have a .close() method, and it's used everywhere
diff --git a/TODO.txt b/TODO.txt
index 120e2e638aa541cfccd793b591054130c7c8020a..0fbb7b9ed4aa8d700870045e5c1eb717eab07ad2 100644 (file)
--- a/TODO.txt
+++ b/TODO.txt
bug mailgw some f*ked mailers QUOTE their Re; "Re: "[issue1] bla blah""
bug docs need to mention somewhere how sorting works
-bug web :multilink isn't working
-bug docs mention not putting spaces in tracker URL aliases
======= ========= =============================================================
diff --git a/cgi-bin/roundup.cgi b/cgi-bin/roundup.cgi
index 2fb844dc4aeac30ca6aab67f58b70c352d93a374..1518c400b98111965ddeb1bc662dddc9103afc2b 100755 (executable)
--- a/cgi-bin/roundup.cgi
+++ b/cgi-bin/roundup.cgi
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: roundup.cgi,v 1.32 2002-09-16 22:37:26 richard Exp $
+# $Id: roundup.cgi,v 1.33 2002-09-23 00:50:32 richard Exp $
# python version check
from roundup import version_check
# "NAME=DIR<sep>NAME2=DIR2<sep>...", where <sep> is the directory path
# separator (";" on Windows, ":" on Unix).
+# Make sure the NAME part doesn't include any url-unsafe characters like
+# spaces, as these confuse the cookie handling in browsers like IE.
+
# ROUNDUP_LOG is the name of the logfile; if it's empty or does not exist,
# logging is turned off (unless you changed the default below).
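
As an aside, a minimal sketch of how a value in the "NAME=DIR<sep>NAME2=DIR2" format described above could be parsed; the helper name and example paths are made up, and the real CGI script does its own handling:

    import os

    def parse_homes(value):
        # Hypothetical helper: parse "NAME=DIR<sep>NAME2=DIR2<sep>..." into a dict.
        # <sep> is the directory path separator: ";" on Windows, ":" on Unix.
        homes = {}
        for pair in value.split(os.pathsep):
            if not pair:
                continue
            name, home = pair.split('=', 1)
            # NAME ends up in URLs and cookies, so keep it free of spaces and
            # other url-unsafe characters, as the comment above warns.
            homes[name] = home
        return homes

    # e.g. on Unix:
    #   parse_homes('bugs=/var/roundup/bugs:support=/var/roundup/support')
    #   -> {'bugs': '/var/roundup/bugs', 'support': '/var/roundup/support'}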
index 0fdcf28e40bd12eb71c0f4b062a11c1e7606cfde..a039f1598d17b0b030c7e1233cecc04846ecc25a 100644 (file)
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-#$Id: back_anydbm.py,v 1.83 2002-09-20 05:08:00 richard Exp $
+#$Id: back_anydbm.py,v 1.84 2002-09-23 00:50:32 richard Exp $
'''
This module defines a backend that saves the hyperdatabase in a database
chosen by anydbm. It is guaranteed to always be available in python
# save the indexer state
self.indexer.save_index()
+ self.clearCache()
+
+ def clearCache(self):
# all transactions committed, back to normal
self.cache = {}
self.dirtynodes = {}
index d39234f0fe1eaac9a1033143fae9f9edda3a8e57..e53b965cb90dda4a7fe4790877f5519029b6cf0c 100644 (file)
-# $Id: rdbms_common.py,v 1.9 2002-09-20 05:08:00 richard Exp $
+# $Id: rdbms_common.py,v 1.10 2002-09-23 00:50:32 richard Exp $
# standard python modules
import sys, os, time, re, errno, weakref, copy
# open a connection to the database, creating the "conn" attribute
self.open_connection()
+ def clearCache(self):
+ self.cache = {}
+ self.cache_lru = []
+
def open_connection(self):
''' Open a connection to the database, creating it if necessary
'''
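
Both the *dbm and RDBMS backends now expose a clearCache() method; the benchmark below calls it between timed passes, presumably so each pass measures backend reads rather than in-memory cache hits. A minimal usage sketch, assuming db is an open backend Database as in test/benchmark.py:

    # Sketch only: `db` is assumed to be an open Database instance.
    db.clearCache()                    # drop cached nodes (and the LRU list in the RDBMS backends)
    for nodeid in db.issue.list():
        db.issue.get(nodeid, 'title')  # re-read from the backend, not the cache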
index 054e29344b2793c1c1b79997ed48aee1eb816282..25b605cfd9ffe4406228376e2f58281242f94c34 100644 (file)
"""
-__version__='$Revision: 1.6 $'[11:-2]
+__version__='$Revision: 1.7 $'[11:-2]
import re, sys
from TALES import Engine, CompilerError, _valid_name, NAME_RE, \
name = path.pop()
__traceback_info__ = TraversalError(done, name)
- if isinstance(name, TupleType):
- object = apply(object, name)
- continue
+# if isinstance(name, TupleType):
+# object = apply(object, name)
+# continue
# if name[0] == '_':
# # Never allowed in a URL.
index 5b53a0d5a93a95fffce06ab7b2ab19c6699ba214..8b49efcf450b0a1ca6998db868f82c39f9d9a1c3 100644 (file)
#
""" HTTP Server that serves roundup.
-$Id: roundup_server.py,v 1.10 2002-09-10 03:01:19 richard Exp $
+$Id: roundup_server.py,v 1.11 2002-09-23 00:50:32 richard Exp $
"""
# python version check
## Configuration
#
-# This indicates where the Roundup instance lives
+# This indicates where the Roundup trackers live. They're given as NAME ->
+# TRACKER_HOME, where the NAME part is used in the URL to select the
+# appropriate tracker.
+# Make sure the NAME part doesn't include any url-unsafe characters like
+# spaces, as these confuse the cookie handling in browsers like IE.
TRACKER_HOMES = {
'bar': '/tmp/bar',
}
@@ -182,6 +186,8 @@ roundup-server [-n hostname] [-p port] [-l file] [-d file] [name=instance home]*
"roundup-admin init". You may specify any number of these name=home
pairs on the command-line. For convenience, you may edit the
TRACKER_HOMES variable in the roundup-server file instead.
+ Make sure the name part doesn't include any url-unsafe characters like
+ spaces, as these confuse the cookie handling in browsers like IE.
''')%locals()
sys.exit(0)
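
For illustration, a hypothetical multi-tracker configuration matching the description above (tracker names and homes are placeholders):

    # Hypothetical example only: tracker names and homes are placeholders.
    # Each name appears in the URL, so keep it free of spaces and other
    # url-unsafe characters.
    TRACKER_HOMES = {
        'bugs': '/var/roundup/trackers/bugs',
        'support': '/var/roundup/trackers/support',
    }

The same mapping could instead be given as name=home pairs on the command line, for example roundup-server -p 8080 bugs=/var/roundup/trackers/bugs (again hypothetical, using only the options shown in the usage text).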
index 2c2d8d5e33b1acc6890be33e2df0aaae1a192a70..6b5f0fed925057a69c945d1a5dbf4e3fcb0f5832 100644 (file)
<input type="hidden" name=":template" value="item">
<input type="hidden" name=":required" value="name,type">
-<input type="hidden" name="multilink"
+<input type="hidden" name=":multilink"
tal:condition="python:request.form.has_key(':multilink')"
tal:attributes="value request/form/:multilink/value">
diff --git a/test/benchmark.py b/test/benchmark.py
index 496ce65542430b2d0da18e0b70bd63575f0e67fa..99f58b9da6fe800d203586bf75207f6acefecbe9 100644 (file)
--- a/test/benchmark.py
+++ b/test/benchmark.py
session = module.Class(db, 'session', title=String())
session.disableJournalling()
db.post_init()
- status.create(name="unread")
- status.create(name="in-progress")
- status.create(name="testing")
- status.create(name="resolved")
- user.create(username='one')
- user.create(username='two')
db.commit()
class config:
- DATABASE='_test_dir'
+ DATABASE='_benchmark'
GADFLY_DATABASE = ('test', DATABASE)
MAILHOST = 'localhost'
MAIL_DOMAIN = 'fill.me.in.'
except ImportError:
return
- if os.path.exists(config.DATABASE):
- shutil.rmtree(config.DATABASE)
-
times = []
- db = backend.Database(config, 'test')
- setupSchema(db, backend)
- # create a whole bunch of stuff
- for i in range(numissues):
- db.issue.create(**{'title': 'issue %s'%i})
- for j in range(10):
- db.issue.set(str(i+1), status='2', assignedto='2', nosy=[])
- db.issue.set(str(i+1), status='1', assignedto='1', nosy=['1','2'])
- db.user.create(**{'username': 'user %s'%i})
- for j in range(10):
- db.user.set(str(i+1), assignable=1)
- db.user.set(str(i+1), assignable=0)
- db.commit()
+ config.DATABASE = os.path.join('_benchmark', '%s-%s'%(backendname,
+ numissues))
+ if not os.path.exists(config.DATABASE):
+ db = backend.Database(config, 'admin')
+ setupSchema(db, backend)
+ # create a whole bunch of stuff
+ db.user.create(**{'username': 'admin'})
+ db.status.create(name="unread")
+ db.status.create(name="in-progress")
+ db.status.create(name="testing")
+ db.status.create(name="resolved")
+ pc = -1
+ for i in range(numissues):
+ db.user.create(**{'username': 'user %s'%i})
+ for j in range(10):
+ db.user.set(str(i+1), assignable=1)
+ db.user.set(str(i+1), assignable=0)
+ db.issue.create(**{'title': 'issue %s'%i})
+ for j in range(10):
+ db.issue.set(str(i+1), status='2', assignedto='2', nosy=[])
+ db.issue.set(str(i+1), status='1', assignedto='1',
+ nosy=['1','2'])
+ if (i*100/numissues) != pc:
+ pc = (i*100/numissues)
+ sys.stdout.write("%d%%\r"%pc)
+ sys.stdout.flush()
+ db.commit()
+ else:
+ db = backend.Database(config, 'admin')
+ setupSchema(db, backend)
+
sys.stdout.write('%7s: %-6d'%(backendname, numissues))
sys.stdout.flush()
times.append(('start', time()))
# fetch
+ db.clearCache()
for i in db.issue.list():
db.issue.get(i, 'title')
times.append(('fetch', time()))
# journals
+ db.clearCache()
for i in db.issue.list():
db.issue.history(i)
times.append(('journal', time()))
# "calculated" props
+ db.clearCache()
for i in db.issue.list():
db.issue.get(i, 'activity')
db.issue.get(i, 'creator')
times.append(('jprops', time()))
# lookup
+ db.clearCache()
for i in range(numissues):
db.user.lookup('user %s'%i)
times.append(('lookup', time()))
# filter
+ db.clearCache()
+ for i in range(100):
+ db.issue.filter(None, {'assignedto': '1', 'title':'issue'},
+ ('+', 'activity'), ('+', 'status'))
+ times.append(('filter', time()))
+
+ # filter with multilink
+ db.clearCache()
for i in range(100):
db.issue.filter(None, {'nosy': ['1'], 'assignedto': '1',
'title':'issue'}, ('+', 'activity'), ('+', 'status'))
- times.append(('filter', time()))
+ times.append(('filtml', time()))
# results
last = None
if __name__ == '__main__':
# 0 1 2 3 4 5 6
# 01234567890123456789012345678901234567890123456789012345678901234
- print 'Test name fetch journl jprops lookup filter TOTAL '
+ print 'Test name fetch journl jprops lookup filter filtml TOTAL '
for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
main(name)
for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
main(name, numissues=20)
-# for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
-# main(name, numissues=100)
+ for name in 'anydbm bsddb bsddb3 metakit sqlite'.split():
+ main(name, numissues=100)
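
The report is driven by the times list of (label, timestamp) pairs collected above; a minimal sketch, not the script's actual reporting code (which follows the "# results" comment), of how per-phase durations and the TOTAL column can be derived from it:

    # Sketch: `times` starts with ('start', t0), then one (label, timestamp)
    # pair per timed phase (fetch, journal, jprops, lookup, filter, filtml).
    def report(times):
        start = times[0][1]
        last = start
        for label, t in times[1:]:
            print '%-7s %6.2f' % (label, t - last)   # seconds spent in this phase
            last = t
        print '%-7s %6.2f' % ('TOTAL', last - start)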