author    schlatterbeck <schlatterbeck@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Mon, 21 Mar 2011 20:44:39 +0000 (20:44 +0000)
committer schlatterbeck <schlatterbeck@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Mon, 21 Mar 2011 20:44:39 +0000 (20:44 +0000)
- Optimisation: Late evaluation of Multilinks (only in rdbms backends):
  previously we materialized each multilink in a Node -- this creates an
  SQL query for each multilink (e.g. 'files' and 'messages' for each
  line in the issue index display) -- even if the multilinks aren't
  displayed. Now we compute multilinks only if they're accessed (and
  keep them cached).
- Add a filter_iter similar to the existing filter call. This feature is
  considered experimental. It is currently not used in the web interface
  but passes all tests for the filter call except sorting by Multilinks
  (which isn't supported by SQL and isn't a sane concept anyway). Using
  filter_iter instead of filter saves a *lot* of SQL queries: filter
  returns only the IDs of Nodes in the database; the additional content
  of a Node has to be fetched in a separate SQL call. The new
  filter_iter also returns the IDs of Nodes (one by one, it's an
  iterator) but pre-seeds the cache with the content of each Node. The
  information needed for seeding the cache is retrieved in the same SQL
  query as the IDs.
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk@4581 57a73879-2fb5-44c3-a270-3262357dd7e2
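To make the effect concrete, here is a minimal usage sketch (the tracker home
path and the 'status' filterspec are placeholders, and a classic tracker schema
with an 'issue' class is assumed): filter() returns only the ids, so every
subsequent get() may cost another SQL query, while filter_iter() seeds the node
cache from the very same query, and multilinks are only fetched when accessed.

    from roundup import instance

    tracker = instance.open('/path/to/tracker')   # placeholder tracker home
    db = tracker.open('admin')

    # filter(): ids only -- each get() below may trigger its own SQL query
    for nodeid in db.issue.filter(None, {'status': '1'}, [('+', 'id')]):
        title = db.issue.get(nodeid, 'title')

    # filter_iter(): same ids, but the node cache was pre-seeded from the
    # filtering query, so these get() calls are served from memory; a
    # multilink such as 'messages' is now evaluated lazily, on first access
    for nodeid in db.issue.filter_iter(None, {'status': '1'}, [('+', 'id')]):
        title = db.issue.get(nodeid, 'title')
        messages = db.issue.get(nodeid, 'messages')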
diff --git a/CHANGES.txt b/CHANGES.txt
index 6f33989585600f700f64e23738347f2f99023abf..9aa6b79a8a55fe32ef664963f7a68e1b13d0ccda 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
configured default class, or the -c option to the mailgw, or the class
resulting from mail subject parsing. We also accept multiple -S
options for the same class now. (Ralf)
+- Optimisation: Late evaluation of Multilinks (only in rdbms backends):
+ previously we materialized each multilink in a Node -- this creates an
+ SQL query for each multilink (e.g. 'files' and 'messages' for each
+ line in the issue index display) -- even if the multilinks aren't
+ displayed. Now we compute multilinks only if they're accessed (and
+ keep them cached).
+- Add a filter_iter similar to the existing filter call. This feature is
+ considered experimental. This is currently not used in the
+ web-interface but passes all tests for the filter call except sorting
+ by Multilinks (which isn't supported by SQL and isn't a sane concept
+ anyway). When using filter_iter instead of filter this saves a *lot*
+ of SQL queries: Filter returns only the IDs of Nodes in the database,
+ the additional content of a Node has to be fetched in a separate SQL
+ call. The new filter_iter also returns the IDs of Nodes (one by one,
+ it's an iterator) but pre-seeds the cache with the content of the
+ Node. The information needed for seeding the cache is retrieved in the
+ same SQL query as the ids.
Fixed:
index 72d0c69937a3feb1e445c03563dc614fc0bdaabf..9c7d7219f26cddd5fae6e8e39831ac0e8bbdcdf7 100644
# keep a cache of the N most recently retrieved rows of any kind
# (classname, nodeid) = row
self.cache_size = config.RDBMS_CACHE_SIZE
- self.cache = {}
- self.cache_lru = []
+ self.clearCache()
self.stats = {'cache_hits': 0, 'cache_misses': 0, 'get_items': 0,
'filtering': 0}
"""
raise NotImplemented
- def sql(self, sql, args=None):
+ def sql(self, sql, args=None, cursor=None):
""" Execute the sql with the optional args.
"""
self.log_debug('SQL %r %r'%(sql, args))
+ if not cursor:
+ cursor = self.cursor
if args:
- self.cursor.execute(sql, args)
+ cursor.execute(sql, args)
else:
- self.cursor.execute(sql)
+ cursor.execute(sql)
def sql_fetchone(self):
""" Fetch a single row. If there's nothing to fetch, return None.
raise ValueError('%r is not a hyperdb property class' % propklass)
+ def _cache_del(self, key):
+ del self.cache[key]
+ self.cache_lru.remove(key)
+
+ def _cache_refresh(self, key):
+ self.cache_lru.remove(key)
+ self.cache_lru.insert(0, key)
+
+ def _cache_save(self, key, node):
+ self.cache[key] = node
+ # update the LRU
+ self.cache_lru.insert(0, key)
+ if len(self.cache_lru) > self.cache_size:
+ del self.cache[self.cache_lru.pop()]
+
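As a standalone illustration of the dict-plus-LRU-list bookkeeping that the
helpers above factor out (toy size limit, not Roundup code): the most recently
used key sits at the front of the list and the oldest entry is evicted once
the limit is exceeded.

    cache = {}        # (classname, nodeid) -> node dictionary
    cache_lru = []    # keys, most recently used first
    cache_size = 2    # toy limit for the example

    def cache_save(key, node):
        cache[key] = node
        cache_lru.insert(0, key)
        if len(cache_lru) > cache_size:
            del cache[cache_lru.pop()]   # drop the least recently used node

    cache_save(('issue', '1'), {'title': 'one'})
    cache_save(('issue', '2'), {'title': 'two'})
    cache_save(('issue', '3'), {'title': 'three'})   # evicts ('issue', '1')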
def addnode(self, classname, nodeid, node):
""" Add the specified node to its class's db.
"""
# clear this node out of the cache if it's in there
key = (classname, nodeid)
if key in self.cache:
- del self.cache[key]
- self.cache_lru.remove(key)
+ self._cache_del(key)
# figure the values to insert
vals = []
# clear this node out of the cache if it's in there
key = (classname, nodeid)
if key in self.cache:
- del self.cache[key]
- self.cache_lru.remove(key)
+ self._cache_del(key)
cl = self.classes[classname]
props = cl.getprops()
key = (classname, nodeid)
if key in self.cache:
# push us back to the top of the LRU
- self.cache_lru.remove(key)
- self.cache_lru.insert(0, key)
+ self._cache_refresh(key)
if __debug__:
self.stats['cache_hits'] += 1
# return the cached information
value = self.to_hyperdb_value(props[name].__class__)(value)
node[name] = value
-
- # now the multilinks
- for col in mls:
- # get the link ids
- sql = 'select linkid from %s_%s where nodeid=%s'%(classname, col,
- self.arg)
- self.sql(sql, (nodeid,))
- # extract the first column from the result
- # XXX numeric ids
- items = [int(x[0]) for x in self.cursor.fetchall()]
- items.sort ()
- node[col] = [str(x) for x in items]
-
# save off in the cache
key = (classname, nodeid)
- self.cache[key] = node
- # update the LRU
- self.cache_lru.insert(0, key)
- if len(self.cache_lru) > self.cache_size:
- del self.cache[self.cache_lru.pop()]
+ self._cache_save(key, node)
if __debug__:
self.stats['get_items'] += (time.time() - start_t)
else:
return self.db.getuid()
- # get the property (raises KeyErorr if invalid)
+ # get the property (raises KeyError if invalid)
prop = self.properties[propname]
+ # lazy evaluation of Multilink
+ if propname not in d and isinstance(prop, Multilink):
+ sql = 'select linkid from %s_%s where nodeid=%s'%(self.classname,
+ propname, self.db.arg)
+ self.db.sql(sql, (nodeid,))
+ # extract the first column from the result
+ # XXX numeric ids
+ items = [int(x[0]) for x in self.db.cursor.fetchall()]
+ items.sort ()
+ d[propname] = [str(x) for x in items]
+
# handle there being no value in the table for the property
if propname not in d or d[propname] is None:
if default is _marker:
multilink_table, ','.join([self.db.arg] * len(v)))
return where, v, True # True to indicate original
- def filter(self, search_matches, filterspec, sort=[], group=[]):
- """Return a list of the ids of the active nodes in this class that
- match the 'filter' spec, sorted by the group spec and then the
- sort spec
-
- "filterspec" is {propname: value(s)}
-
- "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
- or None and prop is a prop name or None. Note that for
- backward-compatibility reasons a single (dir, prop) tuple is
- also allowed.
-
- "search_matches" is a container type or None
-
- The filter must match all properties specificed. If the property
- value to match is a list:
-
- 1. String properties must match all elements in the list, and
- 2. Other properties must match any of the elements in the list.
+ def _filter_sql (self, search_matches, filterspec, srt=[], grp=[], retr=0):
+ """ Compute the proptree and the SQL/ARGS for a filter.
+ For argument description see filter below.
+ We return a 3-tuple, the proptree, the sql and the sql-args
+ or None if no SQL is necessary.
+ The flag retr serves to retrieve *all* non-Multilink properties
+ (for filling the cache during a filter_iter)
"""
# we can't match anything if search_matches is empty
if not search_matches and search_matches is not None:
- return []
-
- if __debug__:
- start_t = time.time()
+ return None
icn = self.classname
# figure the WHERE clause from the filterspec
mlfilt = 0 # are we joining with Multilink tables?
- sortattr = self._sortattr (group = group, sort = sort)
- proptree = self._proptree(filterspec, sortattr)
+ sortattr = self._sortattr (group = grp, sort = srt)
+ proptree = self._proptree(filterspec, sortattr, retr)
mlseen = 0
for pt in reversed(proptree.sortattr):
p = pt
pt.attr_sort_done = pt.tree_sort_done = True
proptree.compute_sort_done()
- ordercols = []
- auxcols = {}
+ cols = ['_%s.id'%icn]
mlsort = []
rhsnum = 0
for p in proptree:
- oc = None
+ rc = ac = oc = None
cn = p.classname
ln = p.uniqname
pln = p.parent.uniqname
k = p.name
v = p.val
propclass = p.propclass
- if p.sort_type > 0:
- oc = ac = '_%s._%s'%(pln, k)
+ if p.parent == proptree and p.name == 'id' \
+ and 'retrieve' in p.need_for:
+ p.sql_idx = 0
+ if 'sort' in p.need_for or 'retrieve' in p.need_for:
+ rc = oc = ac = '_%s._%s'%(pln, k)
if isinstance(propclass, Multilink):
- if p.sort_type < 2:
+ if 'search' in p.need_for:
mlfilt = 1
tn = '%s_%s'%(pcn, k)
if v in ('-1', ['-1'], []):
else:
where.append('%s.linkid=%s'%(tn, a))
args.append(v)
- if p.sort_type > 0:
+ if 'sort' in p.need_for:
assert not p.attr_sort_done and not p.sort_ids_needed
elif k == 'id':
- if p.sort_type < 2:
+ if 'search' in p.need_for:
if isinstance(v, type([])):
# If there are no permitted values, then the
# where clause will always be false, and we
else:
where.append('_%s.%s=%s'%(pln, k, a))
args.append(v)
- if p.sort_type > 0:
- oc = ac = '_%s.id'%pln
+ if 'sort' in p.need_for or 'retrieve' in p.need_for:
+ rc = oc = ac = '_%s.id'%pln
elif isinstance(propclass, String):
- if p.sort_type < 2:
+ if 'search' in p.need_for:
if not isinstance(v, type([])):
v = [v]
+' and '.join(["_%s._%s LIKE '%s'"%(pln, k, s) for s in v])
+')')
# note: args are embedded in the query string now
- if p.sort_type > 0:
+ if 'sort' in p.need_for:
oc = ac = 'lower(_%s._%s)'%(pln, k)
elif isinstance(propclass, Link):
- if p.sort_type < 2:
+ if 'search' in p.need_for:
if p.children:
- if p.sort_type == 0:
+ if 'sort' not in p.need_for:
frum.append('_%s as _%s' % (cn, ln))
where.append('_%s._%s=_%s.id'%(pln, k, ln))
if p.has_values:
else:
where.append('_%s._%s=%s'%(pln, k, a))
args.append(v)
- if p.sort_type > 0:
+ if 'sort' in p.need_for:
lp = p.cls.labelprop()
oc = ac = '_%s._%s'%(pln, k)
if lp != 'id':
- if p.tree_sort_done and p.sort_type > 0:
+ if p.tree_sort_done:
loj.append(
'LEFT OUTER JOIN _%s as _%s on _%s._%s=_%s.id'%(
cn, ln, pln, k, ln))
oc = '_%s._%s'%(ln, lp)
- elif isinstance(propclass, Date) and p.sort_type < 2:
+ if 'retrieve' in p.need_for:
+ rc = '_%s._%s'%(pln, k)
+ elif isinstance(propclass, Date) and 'search' in p.need_for:
dc = self.db.to_sql_value(hyperdb.Date)
if isinstance(v, type([])):
s = ','.join([a for x in v])
pass
elif isinstance(propclass, Interval):
# filter/sort using the __<prop>_int__ column
- if p.sort_type < 2:
+ if 'search' in p.need_for:
if isinstance(v, type([])):
s = ','.join([a for x in v])
where.append('_%s.__%s_int__ in (%s)'%(pln, k, s))
except ValueError:
# If range creation fails - ignore search parameter
pass
- if p.sort_type > 0:
+ if 'sort' in p.need_for:
oc = ac = '_%s.__%s_int__'%(pln,k)
- elif isinstance(propclass, Boolean) and p.sort_type < 2:
+ if 'retrieve' in p.need_for:
+ rc = '_%s._%s'%(pln,k)
+ elif isinstance(propclass, Boolean) and 'search' in p.need_for:
if type(v) == type(""):
v = v.split(',')
if type(v) != type([]):
s = ','.join([a for x in v])
where.append('_%s._%s in (%s)'%(pln, k, s))
args = args + bv
- elif p.sort_type < 2:
+ elif 'search' in p.need_for:
if isinstance(v, type([])):
s = ','.join([a for x in v])
where.append('_%s._%s in (%s)'%(pln, k, s))
args.append(v)
if oc:
if p.sort_ids_needed:
- auxcols[ac] = p
+ if rc == ac:
+ p.sql_idx = len(cols)
+ p.auxcol = len(cols)
+ cols.append(ac)
if p.tree_sort_done and p.sort_direction:
- # Don't select top-level id twice
- if p.name != 'id' or p.parent != proptree:
- ordercols.append(oc)
+ # Don't select top-level id or multilink twice
+ if (not p.sort_ids_needed or ac != oc) and (p.name != 'id'
+ or p.parent != proptree):
+ if rc == oc:
+ p.sql_idx = len(cols)
+ cols.append(oc)
desc = ['', ' desc'][p.sort_direction == '-']
# Some SQL dbs sort NULL values last -- we want them first.
if (self.order_by_null_values and p.name != 'id'):
nv = self.order_by_null_values % oc
- ordercols.append(nv)
+ cols.append(nv)
p.orderby.append(nv + desc)
p.orderby.append(oc + desc)
+ if 'retrieve' in p.need_for and p.sql_idx is None:
+ assert(rc)
+ p.sql_idx = len(cols)
+ cols.append (rc)
props = self.getprops()
if mlfilt:
# we're joining tables on the id, so we will get dupes if we
# don't distinct()
- cols = ['distinct(_%s.id)'%icn]
- else:
- cols = ['_%s.id'%icn]
- if ordercols:
- cols = cols + ordercols
+ cols[0] = 'distinct(_%s.id)'%icn
+
order = []
# keep correct sequence of order attributes.
for sa in proptree.sortattr:
order = ' order by %s'%(','.join(order))
else:
order = ''
- for o, p in auxcols.iteritems ():
- cols.append (o)
- p.auxcol = len (cols) - 1
cols = ','.join(cols)
loj = ' '.join(loj)
sql = 'select %s from %s %s %s%s'%(cols, frum, loj, where, order)
args = tuple(args)
__traceback_info__ = (sql, args)
+ return proptree, sql, args
+
+ def filter(self, search_matches, filterspec, sort=[], group=[]):
+ """Return a list of the ids of the active nodes in this class that
+ match the 'filter' spec, sorted by the group spec and then the
+ sort spec
+
+ "filterspec" is {propname: value(s)}
+
+ "sort" and "group" are [(dir, prop), ...] where dir is '+', '-'
+ or None and prop is a prop name or None. Note that for
+ backward-compatibility reasons a single (dir, prop) tuple is
+ also allowed.
+
+ "search_matches" is a container type or None
+
+ The filter must match all properties specificed. If the property
+ value to match is a list:
+
+ 1. String properties must match all elements in the list, and
+ 2. Other properties must match any of the elements in the list.
+ """
+ if __debug__:
+ start_t = time.time()
+
+ sq = self._filter_sql (search_matches, filterspec, sort, group)
+ # nothing to match?
+ if sq is None:
+ return []
+ proptree, sql, args = sq
+
self.db.sql(sql, args)
l = self.db.sql_fetchall()
# Compute values needed for sorting in proptree.sort
- for p in auxcols.itervalues():
- p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
+ for p in proptree:
+ if hasattr(p, 'auxcol'):
+ p.sort_ids = p.sort_result = [row[p.auxcol] for row in l]
# return the IDs (the first column)
# XXX numeric ids
l = [str(row[0]) for row in l]
self.db.stats['filtering'] += (time.time() - start_t)
return l
+ def filter_iter(self, search_matches, filterspec, sort=[], group=[]):
+ """Iterator similar to filter above with same args.
+ Limitation: We don't sort on multilinks.
+ This uses an optimisation: We put all nodes that are in the
+ current row into the node cache. Then we return the node id.
+ That way a fetch of a node won't create another sql-fetch (with
+ a join) from the database because the nodes are already in the
+ cache. We're using our own temporary cursor.
+ """
+ sq = self._filter_sql(search_matches, filterspec, sort, group, retr=1)
+ # nothing to match?
+ if sq is None:
+ return
+ proptree, sql, args = sq
+ cursor = self.db.conn.cursor()
+ self.db.sql(sql, args, cursor)
+ classes = {}
+ for p in proptree:
+ if 'retrieve' in p.need_for:
+ cn = p.parent.classname
+ ptid = p.parent.id # not the nodeid!
+ key = (cn, ptid)
+ if key not in classes:
+ classes[key] = {}
+ name = p.name
+ assert (name)
+ classes[key][name] = p
+ while True:
+ row = cursor.fetchone()
+ if not row: break
+ # populate cache with current items
+ for (classname, ptid), pt in classes.iteritems():
+ nodeid = str(row[pt['id'].sql_idx])
+ key = (classname, nodeid)
+ if key in self.db.cache:
+ self.db._cache_refresh(key)
+ continue
+ node = {}
+ for propname, p in pt.iteritems():
+ value = row[p.sql_idx]
+ if value is not None:
+ cls = p.propclass.__class__
+ value = self.db.to_hyperdb_value(cls)(value)
+ node[propname] = value
+ self.db._cache_save(key, node)
+ yield str(row[0])
+
def filter_sql(self, sql):
"""Return a list of the ids of the items in this class that match
the SQL provided. The SQL is a complete "select" statement.
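To illustrate the cache pre-seeding performed by the new filter_iter above,
here is a small standalone sketch with made-up data (the column layout is
hypothetical, not the real generated SQL): each joined result row carries the
retrieved columns of every class on the path, and it is split into one cache
entry per (classname, nodeid), so later fetches of those nodes become cache
hits.

    # sql_idx of each retrieved property, grouped by proptree class node
    layout = {
        ('issue', 1):  {'id': 0, 'title': 1, 'status': 2},
        ('status', 2): {'id': 2, 'name': 3},
    }
    row = (7, 'issue seven', 2, 'deferred')   # one fetched row (made up)
    cache = {}
    for (classname, ptid), cols in layout.items():
        nodeid = str(row[cols['id']])
        node = dict((name, row[idx]) for name, idx in cols.items())
        cache[(classname, nodeid)] = node
    # cache now holds ('issue', '7') and ('status', '2'); retrieving either
    # node afterwards needs no further SQL query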
diff --git a/roundup/date.py b/roundup/date.py
index af8d1d533c155a4bf2c6ca055a640f342cc6c5ef..1f3f977b7421645beec00a3ca076aa1312832fc8 100644
--- a/roundup/date.py
+++ b/roundup/date.py
serving as translation functions.
"""
self.setTranslator(translator)
+ # Python 2.3+ datetime object
+ # common case when reading from database: avoid double-conversion
+ if isinstance(spec, datetime.datetime):
+ if offset == 0:
+ self.year, self.month, self.day, self.hour, self.minute, \
+ self.second = spec.timetuple()[:6]
+ else:
+ TZ = get_timezone(tz)
+ self.year, self.month, self.day, self.hour, self.minute, \
+ self.second = TZ.localize(spec).utctimetuple()[:6]
+ self.second += spec.microsecond/1000000.
+ return
+
if type(spec) == type(''):
self.set(spec, offset=offset, add_granularity=add_granularity)
return
- elif isinstance(spec, datetime.datetime):
- # Python 2.3+ datetime object
- y,m,d,H,M,S,x,x,x = spec.timetuple()
- S += spec.microsecond/1000000.
- spec = (y,m,d,H,M,S,x,x,x)
elif hasattr(spec, 'tuple'):
spec = spec.tuple()
elif isinstance(spec, Date):
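The date.py change above short-circuits the common case of constructing a Date
from a datetime object read out of the database. A rough standalone sketch of
what the new offset == 0 branch computes (fractional seconds folded into the
seconds field), instead of the old detour through a generic 9-tuple spec:

    import datetime

    spec = datetime.datetime(2011, 3, 21, 20, 44, 39, 250000)
    year, month, day, hour, minute, second = spec.timetuple()[:6]
    second += spec.microsecond / 1000000.
    # -> 2011, 3, 21, 20, 44, 39.25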
diff --git a/roundup/hyperdb.py b/roundup/hyperdb.py
index 45a8c6c784f06591465aabe62d55847cb352c647..0c86ede0636fb88e9e8a91ba88f8eea784bb9af4 100644
--- a/roundup/hyperdb.py
+++ b/roundup/hyperdb.py
""" Simple tree data structure for optimizing searching of
properties. Each node in the tree represents a roundup Class
Property that has to be navigated for finding the given search
- or sort properties. The sort_type attribute is used for
- distinguishing nodes in the tree used for sorting or searching: If
- it is 0 for a node, that node is not used for sorting. If it is 1,
- it is used for both, sorting and searching. If it is 2 it is used
- for sorting only.
+ or sort properties. The need_for attribute is used for
+ distinguishing nodes in the tree used for sorting, searching or
+ retrieval: The attribute is a dictionary containing one or several
+ of the values 'sort', 'search', 'retrieve'.
The Proptree is also used for transitively searching attributes for
backends that do not support transitive search (e.g. anydbm). The
_val attribute with set_val is used for this.
"""
- def __init__(self, db, cls, name, props, parent = None):
+ def __init__(self, db, cls, name, props, parent=None, retr=False):
self.db = db
self.name = name
self.props = props
self.children = []
self.sortattr = []
self.propdict = {}
- self.sort_type = 0
+ self.need_for = {'search' : True}
self.sort_direction = None
self.sort_ids = None
self.sort_ids_needed = False
self.tree_sort_done = False
self.propclass = None
self.orderby = []
+ self.sql_idx = None # index of retrieved column in sql result
if parent:
self.root = parent.root
self.depth = parent.depth + 1
self.root = self
self.seqno = 1
self.depth = 0
- self.sort_type = 1
+ self.need_for['sort'] = True
self.id = self.root.seqno
self.root.seqno += 1
if self.cls:
self.uniqname = '%s%s' % (self.cls.classname, self.id)
if not self.parent:
self.uniqname = self.cls.classname
+ if retr:
+ self.append_retr_props()
- def append(self, name, sort_type = 0):
+ def append(self, name, need_for='search', retr=False):
"""Append a property to self.children. Will create a new
propclass for the child.
"""
if name in self.propdict:
pt = self.propdict[name]
- if sort_type and not pt.sort_type:
- pt.sort_type = 1
+ pt.need_for[need_for] = True
+ if retr and isinstance(pt.propclass, Link):
+ pt.append_retr_props()
return pt
propclass = self.props[name]
cls = None
cls = self.db.getclass(propclass.classname)
props = cls.getprops()
child = self.__class__(self.db, cls, name, props, parent = self)
- child.sort_type = sort_type
+ child.need_for = {need_for : True}
child.propclass = propclass
self.children.append(child)
self.propdict[name] = child
+ if retr and isinstance(child.propclass, Link):
+ child.append_retr_props()
return child
+ def append_retr_props(self):
+ """Append properties for retrieval."""
+ for name, prop in self.cls.getprops(protected=1).iteritems():
+ if isinstance(prop, Multilink):
+ continue
+ self.append(name, need_for='retrieve')
+
def compute_sort_done(self, mlseen=False):
""" Recursively check if attribute is needed for sorting
- (self.sort_type > 0) or all children have tree_sort_done set and
+ ('sort' in self.need_for) or all children have tree_sort_done set and
sort_ids_needed unset: set self.tree_sort_done if one of the conditions
holds. Also remove sort_ids_needed recursively once having seen a
Multilink.
p.compute_sort_done(mlseen)
if not p.tree_sort_done:
self.tree_sort_done = False
- if not self.sort_type:
+ if 'sort' not in self.need_for:
self.tree_sort_done = True
if mlseen:
self.tree_sort_done = False
"""
filterspec = {}
for p in self.children:
- if p.sort_type < 2:
+ if 'search' in p.need_for:
if p.children:
p.search(sort = False)
filterspec[p.name] = p.val
too.
"""
return [p for p in self.children
- if p.sort_type > 0 and (intermediate or p.sort_direction)]
+ if 'sort' in p.need_for and (intermediate or p.sort_direction)]
def __iter__(self):
""" Yield nodes in depth-first order -- visited nodes first """
curdir = sa.sort_direction
idx += 1
sortattr.append (val)
- #print >> sys.stderr, "\nsortattr", sortattr
sortattr = zip (*sortattr)
for dir, i in reversed(zip(directions, dir_idx)):
rev = dir == '-'
"""
raise NotImplementedError
- def _proptree(self, filterspec, sortattr=[]):
+ def _proptree(self, filterspec, sortattr=[], retr=False):
"""Build a tree of all transitive properties in the given
filterspec.
+ If we retrieve (retr is True) linked items we don't follow
+ across multilinks. We also don't follow if the searched value
+ can contain NULL values.
"""
- proptree = Proptree(self.db, self, '', self.getprops())
+ proptree = Proptree(self.db, self, '', self.getprops(), retr=retr)
for key, v in filterspec.iteritems():
keys = key.split('.')
p = proptree
+ mlseen = False
for k in keys:
- p = p.append(k)
+ if isinstance (p.propclass, Multilink):
+ mlseen = True
+ isnull = v == '-1' or v is None
+ nullin = isinstance(v, type([])) and ('-1' in v or None in v)
+ r = retr and not mlseen and not isnull and not nullin
+ p = p.append(k, retr=r)
p.val = v
multilinks = {}
for s in sortattr:
keys = s[1].split('.')
p = proptree
+ mlseen = False
for k in keys:
- p = p.append(k, sort_type = 2)
+ if isinstance (p.propclass, Multilink):
+ mlseen = True
+ r = retr and not mlseen
+ p = p.append(k, need_for='sort', retr=r)
if isinstance (p.propclass, Multilink):
multilinks[p] = True
if p.cls:
- p = p.append(p.cls.orderprop(), sort_type = 2)
+ p = p.append(p.cls.orderprop(), need_for='sort')
if p.sort_direction: # if an orderprop is also specified explicitly
continue
p.sort_direction = s[0]
This implements a non-optimized version of Transitive search
using _filter implemented in a backend class. A more efficient
version can be implemented in the individual backends -- e.g.,
- an SQL backen will want to create a single SQL statement and
+ an SQL backend will want to create a single SQL statement and
override the filter method instead of implementing _filter.
"""
sortattr = self._sortattr(sort = sort, group = group)
proptree.search(search_matches)
return proptree.sort()
+ # non-optimized filter_iter, a backend may chose to implement a
+ # better version that provides a real iterator that pre-fills the
+ # cache for each id returned. Note that the filter_iter doesn't
+ # promise to correctly sort by multilink (which isn't sane to do
+ # anyway).
+ filter_iter = filter
+
def count(self):
"""Get the number of nodes in this class.
diff --git a/test/db_test_base.py b/test/db_test_base.py
index 0ddc8f5d6d472ec50d161581480d713d2035aa04..e508edf226a70ba0af33681258ea4b060d00d416 100644
--- a/test/db_test_base.py
+++ b/test/db_test_base.py
config.RDBMS_HOST = "localhost"
config.RDBMS_USER = "rounduptest"
config.RDBMS_PASSWORD = "rounduptest"
+#config.RDBMS_TEMPLATE = "template0"
#config.logging = MockNull()
# these TRACKER_WEB and MAIL_DOMAIN values are used in mailgw tests
config.MAIL_DOMAIN = "your.tracker.email.domain.example"
if os.path.exists(config.DATABASE):
shutil.rmtree(config.DATABASE)
+ def open_database(self):
+ self.db = self.module.Database(config, 'admin')
+
+
if os.environ.has_key('LOGGING_LEVEL'):
from roundup import rlog
config.logging = rlog.BasicLogging()
config.logging.setLevel(os.environ['LOGGING_LEVEL'])
config.logging.getLogger('roundup.hyperdb').setFormat('%(message)s')
-class DBTest(MyTestCase):
+class commonDBTest(MyTestCase):
def setUp(self):
# remove previous test, ignore errors
if os.path.exists(config.DATABASE):
self.open_database()
setupSchema(self.db, 1, self.module)
- def open_database(self):
- self.db = self.module.Database(config, 'admin')
+ def iterSetup(self, classname='issue'):
+ cls = getattr(self.db, classname)
+ def filt_iter(*args):
+ """ for checking equivalence of filter and filter_iter """
+ return list(cls.filter_iter(*args))
+ return self.assertEqual, cls.filter, filt_iter
+
+ def filteringSetupTransitiveSearch(self, classname='issue'):
+ u_m = {}
+ k = 30
+ for user in (
+ {'username': 'ceo', 'age': 129},
+ {'username': 'grouplead1', 'age': 29, 'supervisor': '3'},
+ {'username': 'grouplead2', 'age': 29, 'supervisor': '3'},
+ {'username': 'worker1', 'age': 25, 'supervisor' : '4'},
+ {'username': 'worker2', 'age': 24, 'supervisor' : '4'},
+ {'username': 'worker3', 'age': 23, 'supervisor' : '5'},
+ {'username': 'worker4', 'age': 22, 'supervisor' : '5'},
+ {'username': 'worker5', 'age': 21, 'supervisor' : '5'}):
+ u = self.db.user.create(**user)
+ u_m [u] = self.db.msg.create(author = u, content = ' '
+ , date = date.Date ('2006-01-%s' % k))
+ k -= 1
+ i = date.Interval('-1d')
+ for issue in (
+ {'title': 'ts1', 'status': '2', 'assignedto': '6',
+ 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']},
+ {'title': 'ts2', 'status': '1', 'assignedto': '6',
+ 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']},
+ {'title': 'ts4', 'status': '2', 'assignedto': '7',
+ 'priority': '3', 'messages' : [u_m ['7']]},
+ {'title': 'ts5', 'status': '1', 'assignedto': '8',
+ 'priority': '3', 'messages' : [u_m ['8']]},
+ {'title': 'ts6', 'status': '2', 'assignedto': '9',
+ 'priority': '3', 'messages' : [u_m ['9']]},
+ {'title': 'ts7', 'status': '1', 'assignedto': '10',
+ 'priority': '3', 'messages' : [u_m ['10']]},
+ {'title': 'ts8', 'status': '2', 'assignedto': '10',
+ 'priority': '3', 'messages' : [u_m ['10']], 'foo' : i},
+ {'title': 'ts9', 'status': '1', 'assignedto': '10',
+ 'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}):
+ self.db.issue.create(**issue)
+ return self.iterSetup(classname)
+
+
+class DBTest(commonDBTest):
def testRefresh(self):
self.db.refresh_database()
'2008-02-29.00:00:00')
self.assertEquals(self.db.issue.filter(None,
{'deadline': '2008-02-29'}), [nid])
+ self.assertEquals(list(self.db.issue.filter_iter(None,
+ {'deadline': '2008-02-29'})), [nid])
self.db.issue.set(nid, deadline=date.Date('2008-03-01'))
self.assertEquals(str(self.db.issue.get(nid, 'deadline')),
'2008-03-01.00:00:00')
self.assertEquals(self.db.issue.filter(None,
{'deadline': '2008-02-29'}), [])
+ self.assertEquals(list(self.db.issue.filter_iter(None,
+ {'deadline': '2008-02-29'})), [])
def testDateUnset(self):
for commit in (0,1):
self.db.issue.retire(ids[0])
self.assertEqual(len(self.db.issue.stringFind(title='spam')), 1)
- def filteringSetup(self):
+ def filteringSetup(self, classname='issue'):
for user in (
{'username': 'bleep', 'age': 1, 'assignable': True},
{'username': 'blop', 'age': 1.5, 'assignable': True},
{'username': 'blorp', 'age': 2, 'assignable': False}):
self.db.user.create(**user)
- iss = self.db.issue
file_content = ''.join([chr(i) for i in range(255)])
f = self.db.file.create(content=file_content)
for issue in (
'files': [f]}):
self.db.issue.create(**issue)
self.db.commit()
- return self.assertEqual, self.db.issue.filter
+ return self.iterSetup(classname)
def testFilteringID(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1'])
- ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2'])
- ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), [])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'id': '1'}, ('+','id'), (None,None)), ['1'])
+ ae(filt(None, {'id': '2'}, ('+','id'), (None,None)), ['2'])
+ ae(filt(None, {'id': '100'}, ('+','id'), (None,None)), [])
def testFilteringBoolean(self):
- self.filteringSetup()
- ae, filt = self.assertEqual, self.db.user.filter
+ ae, filter, filter_iter = self.filteringSetup('user')
a = 'assignable'
- ae(filt(None, {a: '1'}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: '0'}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: ['1']}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: ['0']}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: ['0','1']}, ('+','id'), (None,None)), ['3','4','5'])
- ae(filt(None, {a: 'True'}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: 'False'}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: ['True']}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: ['False']}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: ['False','True']}, ('+','id'), (None,None)),
- ['3','4','5'])
- ae(filt(None, {a: True}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: False}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: 1}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: 0}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: [1]}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: [0]}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: [0,1]}, ('+','id'), (None,None)), ['3','4','5'])
- ae(filt(None, {a: [True]}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {a: [False]}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {a: [False,True]}, ('+','id'), (None,None)),
- ['3','4','5'])
+ for filt in filter, filter_iter:
+ ae(filt(None, {a: '1'}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: '0'}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: ['1']}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: ['0']}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: ['0','1']}, ('+','id'), (None,None)),
+ ['3','4','5'])
+ ae(filt(None, {a: 'True'}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: 'False'}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: ['True']}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: ['False']}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: ['False','True']}, ('+','id'), (None,None)),
+ ['3','4','5'])
+ ae(filt(None, {a: True}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: False}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: 1}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: 0}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: [1]}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: [0]}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: [0,1]}, ('+','id'), (None,None)), ['3','4','5'])
+ ae(filt(None, {a: [True]}, ('+','id'), (None,None)), ['3','4'])
+ ae(filt(None, {a: [False]}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {a: [False,True]}, ('+','id'), (None,None)),
+ ['3','4','5'])
def testFilteringNumber(self):
- self.filteringSetup()
- ae, filt = self.assertEqual, self.db.user.filter
- ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3'])
- ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4'])
- ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)), ['3','5'])
- ae(filt(None, {'age': 2}, ('+','id'), (None,None)), ['5'])
- ae(filt(None, {'age': [1,2]}, ('+','id'), (None,None)), ['3','5'])
+ ae, filter, filter_iter = self.filteringSetup('user')
+ for filt in filter, filter_iter:
+ ae(filt(None, {'age': '1'}, ('+','id'), (None,None)), ['3'])
+ ae(filt(None, {'age': '1.5'}, ('+','id'), (None,None)), ['4'])
+ ae(filt(None, {'age': '2'}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {'age': ['1','2']}, ('+','id'), (None,None)),
+ ['3','5'])
+ ae(filt(None, {'age': 2}, ('+','id'), (None,None)), ['5'])
+ ae(filt(None, {'age': [1,2]}, ('+','id'), (None,None)), ['3','5'])
def testFilteringString(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1'])
- ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)),
- ['1'])
- ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)),
- ['1'])
- ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)),
- ['1','2','3'])
- ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)),
- [])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'title': ['one']}, ('+','id'), (None,None)), ['1'])
+ ae(filt(None, {'title': ['issue one']}, ('+','id'), (None,None)),
+ ['1'])
+ ae(filt(None, {'title': ['issue', 'one']}, ('+','id'), (None,None)),
+ ['1'])
+ ae(filt(None, {'title': ['issue']}, ('+','id'), (None,None)),
+ ['1','2','3'])
+ ae(filt(None, {'title': ['one', 'two']}, ('+','id'), (None,None)),
+ [])
def testFilteringLink(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['2','3'])
- ae(filt(None, {'assignedto': '-1'}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {'assignedto': None}, ('+','id'), (None,None)), ['3','4'])
- ae(filt(None, {'assignedto': [None]}, ('+','id'), (None,None)),
- ['3','4'])
- ae(filt(None, {'assignedto': ['-1', None]}, ('+','id'), (None,None)),
- ['3','4'])
- ae(filt(None, {'assignedto': ['1', None]}, ('+','id'), (None,None)),
- ['1', '3','4'])
+ ae, filter, filter_iter = self.filteringSetup()
+ a = 'assignedto'
+ grp = (None, None)
+ for filt in filter, filter_iter:
+ ae(filt(None, {'status': '1'}, ('+','id'), grp), ['2','3'])
+ ae(filt(None, {a: '-1'}, ('+','id'), grp), ['3','4'])
+ ae(filt(None, {a: None}, ('+','id'), grp), ['3','4'])
+ ae(filt(None, {a: [None]}, ('+','id'), grp), ['3','4'])
+ ae(filt(None, {a: ['-1', None]}, ('+','id'), grp), ['3','4'])
+ ae(filt(None, {a: ['1', None]}, ('+','id'), grp), ['1', '3','4'])
def testFilteringMultilinkAndGroup(self):
"""testFilteringMultilinkAndGroup:
See roundup Bug 1541128: apparently grouping by something and
searching a Multilink failed with MySQL 5.0
"""
- ae, filt = self.filteringSetup()
- ae(filt(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for f in filter, filter_iter:
+ ae(f(None, {'files': '1'}, ('-','activity'), ('+','status')), ['4'])
def testFilteringRetired(self):
- ae, filt = self.filteringSetup()
+ ae, filter, filter_iter = self.filteringSetup()
self.db.issue.retire('2')
- ae(filt(None, {'status': '1'}, ('+','id'), (None,None)), ['3'])
+ for f in filter, filter_iter:
+ ae(f(None, {'status': '1'}, ('+','id'), (None,None)), ['3'])
def testFilteringMultilink(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4'])
- ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2'])
- ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'),
- ('-', 'deadline')), ['4', '3'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'nosy': '3'}, ('+','id'), (None,None)), ['4'])
+ ae(filt(None, {'nosy': '-1'}, ('+','id'), (None,None)), ['1', '2'])
+ ae(filt(None, {'nosy': ['1','2']}, ('+', 'status'),
+ ('-', 'deadline')), ['4', '3'])
def testFilteringMany(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)),
- ['3'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for f in filter, filter_iter:
+ ae(f(None, {'nosy': '2', 'status': '1'}, ('+','id'), (None,None)),
+ ['3'])
def testFilteringRangeBasic(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'deadline': 'from 2003-02-10 to 2003-02-23'}), ['1','3'])
- ae(filt(None, {'deadline': '2003-02-10; 2003-02-23'}), ['1','3'])
- ae(filt(None, {'deadline': '; 2003-02-16'}), ['2'])
+ ae, filter, filter_iter = self.filteringSetup()
+ d = 'deadline'
+ for f in filter, filter_iter:
+ ae(f(None, {d: 'from 2003-02-10 to 2003-02-23'}), ['1','3'])
+ ae(f(None, {d: '2003-02-10; 2003-02-23'}), ['1','3'])
+ ae(f(None, {d: '; 2003-02-16'}), ['2'])
def testFilteringRangeTwoSyntaxes(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4'])
- ae(filt(None, {'deadline': '2003-02-16;'}), ['1', '3', '4'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'deadline': 'from 2003-02-16'}), ['1', '3', '4'])
+ ae(filt(None, {'deadline': '2003-02-16;'}), ['1', '3', '4'])
def testFilteringRangeYearMonthDay(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'deadline': '2002'}), [])
- ae(filt(None, {'deadline': '2003'}), ['1', '2', '3'])
- ae(filt(None, {'deadline': '2004'}), ['4'])
- ae(filt(None, {'deadline': '2003-02-16'}), ['1'])
- ae(filt(None, {'deadline': '2003-02-17'}), [])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'deadline': '2002'}), [])
+ ae(filt(None, {'deadline': '2003'}), ['1', '2', '3'])
+ ae(filt(None, {'deadline': '2004'}), ['4'])
+ ae(filt(None, {'deadline': '2003-02-16'}), ['1'])
+ ae(filt(None, {'deadline': '2003-02-17'}), [])
def testFilteringRangeMonths(self):
- ae, filt = self.filteringSetup()
+ ae, filter, filter_iter = self.filteringSetup()
for month in range(1, 13):
for n in range(1, month+1):
i = self.db.issue.create(title='%d.%d'%(month, n),
self.db.commit()
for month in range(1, 13):
- r = filt(None, dict(deadline='2001-%02d'%month))
- assert len(r) == month, 'month %d != length %d'%(month, len(r))
+ for filt in filter, filter_iter:
+ r = filt(None, dict(deadline='2001-%02d'%month))
+ assert len(r) == month, 'month %d != length %d'%(month, len(r))
def testFilteringRangeInterval(self):
- ae, filt = self.filteringSetup()
- ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1'])
- ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2'])
- ae(filt(None, {'foo': 'from 5:50'}), ['2'])
- ae(filt(None, {'foo': 'to 0:05'}), [])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'foo': 'from 0:50 to 2:00'}), ['1'])
+ ae(filt(None, {'foo': 'from 0:50 to 1d 2:00'}), ['1', '2'])
+ ae(filt(None, {'foo': 'from 5:50'}), ['2'])
+ ae(filt(None, {'foo': 'to 0:05'}), [])
def testFilteringRangeGeekInterval(self):
- ae, filt = self.filteringSetup()
+ ae, filter, filter_iter = self.filteringSetup()
for issue in (
{ 'deadline': date.Date('. -2d')},
{ 'deadline': date.Date('. -1d')},
{ 'deadline': date.Date('. -8d')},
):
self.db.issue.create(**issue)
- ae(filt(None, {'deadline': '-2d;'}), ['5', '6'])
- ae(filt(None, {'deadline': '-1d;'}), ['6'])
- ae(filt(None, {'deadline': '-1w;'}), ['5', '6'])
+ for filt in filter, filter_iter:
+ ae(filt(None, {'deadline': '-2d;'}), ['5', '6'])
+ ae(filt(None, {'deadline': '-1d;'}), ['6'])
+ ae(filt(None, {'deadline': '-1w;'}), ['5', '6'])
def testFilteringIntervalSort(self):
# 1: '1:10'
# 2: '1d'
# 3: None
# 4: '0:10'
- ae, filt = self.filteringSetup()
- # ascending should sort None, 1:10, 1d
- ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2'])
- # descending should sort 1d, 1:10, None
- ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ # ascending should sort None, 1:10, 1d
+ ae(filt(None, {}, ('+','foo'), (None,None)), ['3', '4', '1', '2'])
+ # descending should sort 1d, 1:10, None
+ ae(filt(None, {}, ('-','foo'), (None,None)), ['2', '1', '4', '3'])
def testFilteringStringSort(self):
# 1: 'issue one'
# 2: 'issue two'
# 3: 'issue three'
# 4: 'non four'
- ae, filt = self.filteringSetup()
- ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
- ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
+ ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
# Test string case: For now allow both, w/wo case matching.
# 1: 'issue one'
# 2: 'issue two'
# 3: 'Issue three'
# 4: 'non four'
self.db.issue.set('3', title='Issue three')
- ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
- ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
+ for filt in filter, filter_iter:
+ ae(filt(None, {}, ('+','title')), ['1', '3', '2', '4'])
+ ae(filt(None, {}, ('-','title')), ['4', '2', '3', '1'])
# Obscure bug in anydbm backend trying to convert to number
# 1: '1st issue'
# 2: '2'
# 4: 'non four'
self.db.issue.set('1', title='1st issue')
self.db.issue.set('2', title='2')
- ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4'])
- ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1'])
+ for filt in filter, filter_iter:
+ ae(filt(None, {}, ('+','title')), ['1', '2', '3', '4'])
+ ae(filt(None, {}, ('-','title')), ['4', '3', '2', '1'])
def testFilteringMultilinkSort(self):
# 1: [] Reverse: 1: []
# Note the sort order for the multilink doen't change when
# reversing the sort direction due to the re-sorting of the
# multilink!
- ae, filt = self.filteringSetup()
+ # Note that we don't test filter_iter here, Multilink sort-order
+ # isn't defined for that.
+ ae, filt, dummy = self.filteringSetup()
ae(filt(None, {}, ('+','nosy'), (None,None)), ['1', '2', '4', '3'])
ae(filt(None, {}, ('-','nosy'), (None,None)), ['4', '3', '1', '2'])
# 2: status: 1 "unread" nosy: []
# 3: status: 1 "unread" nosy: ['admin','fred']
# 4: status: 3 "testing" nosy: ['admin','bleep','fred']
- ae, filt = self.filteringSetup()
+ # Note that we don't test filter_iter here, Multilink sort-order
+ # isn't defined for that.
+ ae, filt, dummy = self.filteringSetup()
ae(filt(None, {}, ('+','nosy'), ('+','status')), ['1', '4', '2', '3'])
ae(filt(None, {}, ('-','nosy'), ('+','status')), ['1', '4', '3', '2'])
ae(filt(None, {}, ('+','nosy'), ('-','status')), ['2', '3', '4', '1'])
# 2: status: 1 -> 'u', priority: 3 -> 1
# 3: status: 1 -> 'u', priority: 2 -> 3
# 4: status: 3 -> 't', priority: 2 -> 3
- ae, filt = self.filteringSetup()
- ae(filt(None, {}, ('+','status'), ('+','priority')),
- ['1', '2', '4', '3'])
- ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')),
- ['4', '3'])
- ae(filt(None, {'priority.order':'3'}, ('+','status'), ('+','priority')),
- ['4', '3'])
- ae(filt(None, {'priority':['2','3']}, ('+','priority'), ('+','status')),
- ['1', '4', '2', '3'])
- ae(filt(None, {}, ('+','priority'), ('+','status')),
- ['1', '4', '2', '3'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for filt in filter, filter_iter:
+ ae(filt(None, {}, ('+','status'), ('+','priority')),
+ ['1', '2', '4', '3'])
+ ae(filt(None, {'priority':'2'}, ('+','status'), ('+','priority')),
+ ['4', '3'])
+ ae(filt(None, {'priority.order':'3'}, ('+','status'),
+ ('+','priority')), ['4', '3'])
+ ae(filt(None, {'priority':['2','3']}, ('+','priority'),
+ ('+','status')), ['1', '4', '2', '3'])
+ ae(filt(None, {}, ('+','priority'), ('+','status')),
+ ['1', '4', '2', '3'])
def testFilteringDateSort(self):
# '1': '2003-02-16.22:50'
# '2': '2003-01-01.00:00'
# '3': '2003-02-18'
# '4': '2004-03-08'
- ae, filt = self.filteringSetup()
- # ascending
- ae(filt(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4'])
- # descending
- ae(filt(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2'])
+ ae, filter, filter_iter = self.filteringSetup()
+ for f in filter, filter_iter:
+ # ascending
+ ae(f(None, {}, ('+','deadline'), (None,None)), ['2', '1', '3', '4'])
+ # descending
+ ae(f(None, {}, ('-','deadline'), (None,None)), ['4', '3', '1', '2'])
def testFilteringDateSortPriorityGroup(self):
# '1': '2003-02-16.22:50' 1 => 2
# '2': '2003-01-01.00:00' 3 => 1
# '3': '2003-02-18' 2 => 3
# '4': '2004-03-08' 1 => 2
- ae, filt = self.filteringSetup()
-
- # ascending
- ae(filt(None, {}, ('+','deadline'), ('+','priority')),
- ['2', '1', '3', '4'])
- ae(filt(None, {}, ('-','deadline'), ('+','priority')),
- ['1', '2', '4', '3'])
- # descending
- ae(filt(None, {}, ('+','deadline'), ('-','priority')),
- ['3', '4', '2', '1'])
- ae(filt(None, {}, ('-','deadline'), ('-','priority')),
- ['4', '3', '1', '2'])
-
- def filteringSetupTransitiveSearch(self):
- u_m = {}
- k = 30
- for user in (
- {'username': 'ceo', 'age': 129},
- {'username': 'grouplead1', 'age': 29, 'supervisor': '3'},
- {'username': 'grouplead2', 'age': 29, 'supervisor': '3'},
- {'username': 'worker1', 'age': 25, 'supervisor' : '4'},
- {'username': 'worker2', 'age': 24, 'supervisor' : '4'},
- {'username': 'worker3', 'age': 23, 'supervisor' : '5'},
- {'username': 'worker4', 'age': 22, 'supervisor' : '5'},
- {'username': 'worker5', 'age': 21, 'supervisor' : '5'}):
- u = self.db.user.create(**user)
- u_m [u] = self.db.msg.create(author = u, content = ' '
- , date = date.Date ('2006-01-%s' % k))
- k -= 1
- iss = self.db.issue
- for issue in (
- {'title': 'ts1', 'status': '2', 'assignedto': '6',
- 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['4']},
- {'title': 'ts2', 'status': '1', 'assignedto': '6',
- 'priority': '3', 'messages' : [u_m ['6']], 'nosy' : ['5']},
- {'title': 'ts4', 'status': '2', 'assignedto': '7',
- 'priority': '3', 'messages' : [u_m ['7']]},
- {'title': 'ts5', 'status': '1', 'assignedto': '8',
- 'priority': '3', 'messages' : [u_m ['8']]},
- {'title': 'ts6', 'status': '2', 'assignedto': '9',
- 'priority': '3', 'messages' : [u_m ['9']]},
- {'title': 'ts7', 'status': '1', 'assignedto': '10',
- 'priority': '3', 'messages' : [u_m ['10']]},
- {'title': 'ts8', 'status': '2', 'assignedto': '10',
- 'priority': '3', 'messages' : [u_m ['10']]},
- {'title': 'ts9', 'status': '1', 'assignedto': '10',
- 'priority': '3', 'messages' : [u_m ['10'], u_m ['9']]}):
- self.db.issue.create(**issue)
- return self.assertEqual, self.db.issue.filter
+ ae, filter, filter_iter = self.filteringSetup()
+
+ for filt in filter, filter_iter:
+ # ascending
+ ae(filt(None, {}, ('+','deadline'), ('+','priority')),
+ ['2', '1', '3', '4'])
+ ae(filt(None, {}, ('-','deadline'), ('+','priority')),
+ ['1', '2', '4', '3'])
+ # descending
+ ae(filt(None, {}, ('+','deadline'), ('-','priority')),
+ ['3', '4', '2', '1'])
+ ae(filt(None, {}, ('-','deadline'), ('-','priority')),
+ ['4', '3', '1', '2'])
def testFilteringTransitiveLinkUser(self):
- ae, filt = self.filteringSetupTransitiveSearch()
- ufilt = self.db.user.filter
- ae(ufilt(None, {'supervisor.username': 'ceo'}, ('+','username')),
- ['4', '5'])
- ae(ufilt(None, {'supervisor.supervisor.username': 'ceo'},
- ('+','username')), ['6', '7', '8', '9', '10'])
- ae(ufilt(None, {'supervisor.supervisor': '3'}, ('+','username')),
- ['6', '7', '8', '9', '10'])
- ae(ufilt(None, {'supervisor.supervisor.id': '3'}, ('+','username')),
- ['6', '7', '8', '9', '10'])
- ae(ufilt(None, {'supervisor.username': 'grouplead1'}, ('+','username')),
- ['6', '7'])
- ae(ufilt(None, {'supervisor.username': 'grouplead2'}, ('+','username')),
- ['8', '9', '10'])
- ae(ufilt(None, {'supervisor.username': 'grouplead2',
- 'supervisor.supervisor.username': 'ceo'}, ('+','username')),
- ['8', '9', '10'])
- ae(ufilt(None, {'supervisor.supervisor': '3', 'supervisor': '4'},
- ('+','username')), ['6', '7'])
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user')
+ for f in filter, filter_iter:
+ ae(f(None, {'supervisor.username': 'ceo'}, ('+','username')),
+ ['4', '5'])
+ ae(f(None, {'supervisor.supervisor.username': 'ceo'},
+ ('+','username')), ['6', '7', '8', '9', '10'])
+ ae(f(None, {'supervisor.supervisor': '3'}, ('+','username')),
+ ['6', '7', '8', '9', '10'])
+ ae(f(None, {'supervisor.supervisor.id': '3'}, ('+','username')),
+ ['6', '7', '8', '9', '10'])
+ ae(f(None, {'supervisor.username': 'grouplead1'}, ('+','username')),
+ ['6', '7'])
+ ae(f(None, {'supervisor.username': 'grouplead2'}, ('+','username')),
+ ['8', '9', '10'])
+ ae(f(None, {'supervisor.username': 'grouplead2',
+ 'supervisor.supervisor.username': 'ceo'}, ('+','username')),
+ ['8', '9', '10'])
+ ae(f(None, {'supervisor.supervisor': '3', 'supervisor': '4'},
+ ('+','username')), ['6', '7'])
def testFilteringTransitiveLinkSort(self):
- ae, filt = self.filteringSetupTransitiveSearch()
- ufilt = self.db.user.filter
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+ ae, ufilter, ufilter_iter = self.iterSetup('user')
# Need to make ceo his own (and first two users') supervisor,
# otherwise we will depend on sorting order of NULL values.
# Leave that to a separate test.
self.db.user.set('1', supervisor = '3')
self.db.user.set('2', supervisor = '3')
self.db.user.set('3', supervisor = '3')
- ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5'])
- ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
- ('+','supervisor.supervisor'), ('+','supervisor'),
- ('+','username')]),
- ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
- ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
- ('-','supervisor.supervisor'), ('-','supervisor'),
- ('+','username')]),
- ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('+','assignedto.supervisor'), ('+','assignedto')]),
- ['1', '2', '3', '4', '5', '6', '7', '8'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('-','assignedto.supervisor'), ('+','assignedto')]),
- ['4', '5', '6', '7', '8', '1', '2', '3'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('+','assignedto.supervisor'), ('+','assignedto'),
- ('-','status')]),
- ['2', '1', '3', '4', '5', '6', '8', '7'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('+','assignedto.supervisor'), ('+','assignedto'),
- ('+','status')]),
- ['1', '2', '3', '4', '5', '7', '6', '8'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
- ['4', '5', '7', '6', '8', '1', '2', '3'])
- ae(filt(None, {'assignedto':['6','7','8','9','10']},
- [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
- ['4', '5', '7', '6', '8', '1', '2', '3'])
- ae(filt(None, {'assignedto':['6','7','8','9']},
- [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('-','assignedto.supervisor'), ('+','assignedto'), ('+','status')]),
- ['4', '5', '1', '2', '3'])
+ for ufilt in ufilter, ufilter_iter:
+ ae(ufilt(None, {'supervisor':'3'}, []), ['1', '2', '3', '4', '5'])
+ ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+ ('+','supervisor.supervisor'), ('+','supervisor'),
+ ('+','username')]),
+ ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
+ ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+ ('-','supervisor.supervisor'), ('-','supervisor'),
+ ('+','username')]),
+ ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
+ for f in filter, filter_iter:
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('+','assignedto.supervisor'), ('+','assignedto')]),
+ ['1', '2', '3', '4', '5', '6', '7', '8'])
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto')]),
+ ['4', '5', '6', '7', '8', '1', '2', '3'])
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('+','assignedto.supervisor'), ('+','assignedto'),
+ ('-','status')]),
+ ['2', '1', '3', '4', '5', '6', '8', '7'])
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('+','assignedto.supervisor'), ('+','assignedto'),
+ ('+','status')]),
+ ['1', '2', '3', '4', '5', '7', '6', '8'])
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto'),
+ ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3'])
+ ae(f(None, {'assignedto':['6','7','8','9','10']},
+ [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto'),
+ ('+','status')]), ['4', '5', '7', '6', '8', '1', '2', '3'])
+ ae(f(None, {'assignedto':['6','7','8','9']},
+ [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto'),
+ ('+','status')]), ['4', '5', '1', '2', '3'])
def testFilteringTransitiveLinkSortNull(self):
"""Check sorting of NULL values"""
- ae, filt = self.filteringSetupTransitiveSearch()
- ufilt = self.db.user.filter
- ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
- ('+','supervisor.supervisor'), ('+','supervisor'),
- ('+','username')]),
- ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
- ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
- ('-','supervisor.supervisor'), ('-','supervisor'),
- ('+','username')]),
- ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('+','assignedto.supervisor'), ('+','assignedto')]),
- ['1', '2', '3', '4', '5', '6', '7', '8'])
- ae(filt(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
- ('+','assignedto.supervisor.supervisor'),
- ('-','assignedto.supervisor'), ('+','assignedto')]),
- ['4', '5', '6', '7', '8', '1', '2', '3'])
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+ ae, ufilter, ufilter_iter = self.iterSetup('user')
+ for ufilt in ufilter, ufilter_iter:
+ ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+ ('+','supervisor.supervisor'), ('+','supervisor'),
+ ('+','username')]),
+ ['1', '3', '2', '4', '5', '6', '7', '8', '9', '10'])
+ ae(ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+ ('-','supervisor.supervisor'), ('-','supervisor'),
+ ('+','username')]),
+ ['8', '9', '10', '6', '7', '4', '5', '1', '3', '2'])
+ for f in filter, filter_iter:
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('+','assignedto.supervisor'), ('+','assignedto')]),
+ ['1', '2', '3', '4', '5', '6', '7', '8'])
+ ae(f(None, {}, [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto')]),
+ ['4', '5', '6', '7', '8', '1', '2', '3'])
def testFilteringTransitiveLinkIssue(self):
- ae, filt = self.filteringSetupTransitiveSearch()
- ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'},
- ('+','id')), ['1', '2', '3'])
- ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'},
- ('+','id')), ['4', '5', '6', '7', '8'])
- ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
- 'status': '1'}, ('+','id')), ['4', '6', '8'])
- ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
- 'status': '2'}, ('+','id')), ['5', '7'])
- ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'],
- 'status': '2'}, ('+','id')), ['5', '7'])
- ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'},
- ('+','id')), ['1', '3', '5', '7'])
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'assignedto.supervisor.username': 'grouplead1'},
+ ('+','id')), ['1', '2', '3'])
+ ae(filt(None, {'assignedto.supervisor.username': 'grouplead2'},
+ ('+','id')), ['4', '5', '6', '7', '8'])
+ ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
+ 'status': '1'}, ('+','id')), ['4', '6', '8'])
+ ae(filt(None, {'assignedto.supervisor.username': 'grouplead2',
+ 'status': '2'}, ('+','id')), ['5', '7'])
+ ae(filt(None, {'assignedto.supervisor.username': ['grouplead2'],
+ 'status': '2'}, ('+','id')), ['5', '7'])
+ ae(filt(None, {'assignedto.supervisor': ['4', '5'], 'status': '2'},
+ ('+','id')), ['1', '3', '5', '7'])
def testFilteringTransitiveMultilink(self):
- ae, filt = self.filteringSetupTransitiveSearch()
- ae(filt(None, {'messages.author.username': 'grouplead1'},
- ('+','id')), [])
- ae(filt(None, {'messages.author': '6'},
- ('+','id')), ['1', '2'])
- ae(filt(None, {'messages.author.id': '6'},
- ('+','id')), ['1', '2'])
- ae(filt(None, {'messages.author.username': 'worker1'},
- ('+','id')), ['1', '2'])
- ae(filt(None, {'messages.author': '10'},
- ('+','id')), ['6', '7', '8'])
- ae(filt(None, {'messages.author': '9'},
- ('+','id')), ['5', '8'])
- ae(filt(None, {'messages.author': ['9', '10']},
- ('+','id')), ['5', '6', '7', '8'])
- ae(filt(None, {'messages.author': ['8', '9']},
- ('+','id')), ['4', '5', '8'])
- ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'},
- ('+','id')), ['4', '8'])
- ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'},
- ('+','id')), ['5'])
- ae(filt(None, {'messages.author': ['8', '9', '10'],
- 'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')),
- ['6', '7', '8'])
- ae(filt(None, {'nosy.supervisor.username': 'ceo'},
- ('+','id')), ['1', '2'])
- ae(filt(None, {'messages.author': ['6', '9']},
- ('+','id')), ['1', '2', '5', '8'])
- ae(filt(None, {'messages': ['5', '7']},
- ('+','id')), ['3', '5', '8'])
- ae(filt(None, {'messages.author': ['6', '9'], 'messages': ['5', '7']},
- ('+','id')), ['5', '8'])
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+ for filt in filter, filter_iter:
+ ae(filt(None, {'messages.author.username': 'grouplead1'},
+ ('+','id')), [])
+ ae(filt(None, {'messages.author': '6'},
+ ('+','id')), ['1', '2'])
+ ae(filt(None, {'messages.author.id': '6'},
+ ('+','id')), ['1', '2'])
+ ae(filt(None, {'messages.author.username': 'worker1'},
+ ('+','id')), ['1', '2'])
+ ae(filt(None, {'messages.author': '10'},
+ ('+','id')), ['6', '7', '8'])
+ ae(filt(None, {'messages.author': '9'},
+ ('+','id')), ['5', '8'])
+ ae(filt(None, {'messages.author': ['9', '10']},
+ ('+','id')), ['5', '6', '7', '8'])
+ ae(filt(None, {'messages.author': ['8', '9']},
+ ('+','id')), ['4', '5', '8'])
+ ae(filt(None, {'messages.author': ['8', '9'], 'status' : '1'},
+ ('+','id')), ['4', '8'])
+ ae(filt(None, {'messages.author': ['8', '9'], 'status' : '2'},
+ ('+','id')), ['5'])
+ ae(filt(None, {'messages.author': ['8', '9', '10'],
+ 'messages.date': '2006-01-22.21:00;2006-01-23'}, ('+','id')),
+ ['6', '7', '8'])
+ ae(filt(None, {'nosy.supervisor.username': 'ceo'},
+ ('+','id')), ['1', '2'])
+ ae(filt(None, {'messages.author': ['6', '9']},
+ ('+','id')), ['1', '2', '5', '8'])
+ ae(filt(None, {'messages': ['5', '7']},
+ ('+','id')), ['3', '5', '8'])
+ ae(filt(None, {'messages.author': ['6', '9'],
+ 'messages': ['5', '7']}, ('+','id')), ['5', '8'])
def testFilteringTransitiveMultilinkSort(self):
- ae, filt = self.filteringSetupTransitiveSearch()
+ # Note that we don't test filter_iter here; Multilink sort order
+ # isn't defined for it.
+ ae, filt, dummy = self.filteringSetupTransitiveSearch()
ae(filt(None, {}, [('+','messages.author')]),
['1', '2', '3', '4', '5', '8', '6', '7'])
ae(filt(None, {}, [('-','messages.author')]),
['3', '1', '2', '6', '7', '5', '4', '8'])
def testFilteringSortId(self):
- ae, filt = self.filteringSetupTransitiveSearch()
- ae(self.db.user.filter(None, {}, ('+','id')),
- ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'])
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch('user')
+ for filt in filter, filter_iter:
+ ae(filt(None, {}, ('+','id')),
+ ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'])
# XXX add sorting tests for other types
def testImportExport(self):
# use the filtering setup to create a bunch of items
- ae, filt = self.filteringSetup()
+ ae, dummy1, dummy2 = self.filteringSetup()
# Get some stuff into the journal for testing import/export of
# journal data:
self.db.user.set('4', password = password.Password('xyzzy'))
import roundup.admin
import csv
# use the filtering setup to create a bunch of items
- ae, filt = self.filteringSetup()
+ ae, dummy1, dummy2 = self.filteringSetup()
# create large field
self.db.priority.create(name = 'X' * 500)
self.db.config.CSV_FIELD_SIZE = 400
shutil.rmtree(config.DATABASE)
os.makedirs(config.DATABASE + '/files')
- def open_database(self):
- self.db = self.module.Database(config, 'admin')
-
def test_reservedProperties(self):
self.open_database()
self.assertRaises(ValueError, self.module.Class, self.db, "a",
self.assertEqual(self.db.sql_index_exists('_issue', '_issue_id_idx'), 1)
self.assertEqual(self.db.sql_index_exists('_issue', '_issue_x_idx'), 0)
+class FilterCacheTest(commonDBTest):
+ def testFilteringTransitiveLinkCache(self):
+ ae, filter, filter_iter = self.filteringSetupTransitiveSearch()
+ ae, ufilter, ufilter_iter = self.iterSetup('user')
+ # Need to make ceo his own (and first two users') supervisor
+ self.db.user.set('1', supervisor = '3')
+ self.db.user.set('2', supervisor = '3')
+ self.db.user.set('3', supervisor = '3')
+ # test bool value
+ self.db.user.set('4', assignable = True)
+ self.db.user.set('3', assignable = False)
+ filt = self.db.issue.filter_iter
+ ufilt = self.db.user.filter_iter
+ user_result = \
+ { '1' : {'username': 'admin', 'assignable': None,
+ 'supervisor': '3', 'realname': None, 'roles': 'Admin',
+ 'creator': '1', 'age': None, 'actor': '1',
+ 'address': None}
+ , '2' : {'username': 'fred', 'assignable': None,
+ 'supervisor': '3', 'realname': None, 'roles': 'User',
+ 'creator': '1', 'age': None, 'actor': '1',
+ 'address': 'fred@example.com'}
+ , '3' : {'username': 'ceo', 'assignable': False,
+ 'supervisor': '3', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 129.0, 'actor': '1',
+ 'address': None}
+ , '4' : {'username': 'grouplead1', 'assignable': True,
+ 'supervisor': '3', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 29.0, 'actor': '1',
+ 'address': None}
+ , '5' : {'username': 'grouplead2', 'assignable': None,
+ 'supervisor': '3', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 29.0, 'actor': '1',
+ 'address': None}
+ , '6' : {'username': 'worker1', 'assignable': None,
+ 'supervisor': '4', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 25.0, 'actor': '1',
+ 'address': None}
+ , '7' : {'username': 'worker2', 'assignable': None,
+ 'supervisor': '4', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 24.0, 'actor': '1',
+ 'address': None}
+ , '8' : {'username': 'worker3', 'assignable': None,
+ 'supervisor': '5', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 23.0, 'actor': '1',
+ 'address': None}
+ , '9' : {'username': 'worker4', 'assignable': None,
+ 'supervisor': '5', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 22.0, 'actor': '1',
+ 'address': None}
+ , '10' : {'username': 'worker5', 'assignable': None,
+ 'supervisor': '5', 'realname': None, 'roles': None,
+ 'creator': '1', 'age': 21.0, 'actor': '1',
+ 'address': None}
+ }
+ foo = date.Interval('-1d')
+ issue_result = \
+ { '1' : {'title': 'ts1', 'status': '2', 'assignedto': '6',
+ 'priority': '3', 'messages' : ['4'], 'nosy' : ['4']}
+ , '2' : {'title': 'ts2', 'status': '1', 'assignedto': '6',
+ 'priority': '3', 'messages' : ['4'], 'nosy' : ['5']}
+ , '3' : {'title': 'ts4', 'status': '2', 'assignedto': '7',
+ 'priority': '3', 'messages' : ['5']}
+ , '4' : {'title': 'ts5', 'status': '1', 'assignedto': '8',
+ 'priority': '3', 'messages' : ['6']}
+ , '5' : {'title': 'ts6', 'status': '2', 'assignedto': '9',
+ 'priority': '3', 'messages' : ['7']}
+ , '6' : {'title': 'ts7', 'status': '1', 'assignedto': '10',
+ 'priority': '3', 'messages' : ['8'], 'foo' : None}
+ , '7' : {'title': 'ts8', 'status': '2', 'assignedto': '10',
+ 'priority': '3', 'messages' : ['8'], 'foo' : foo}
+ , '8' : {'title': 'ts9', 'status': '1', 'assignedto': '10',
+ 'priority': '3', 'messages' : ['7', '8']}
+ }
+ result = []
+ self.db.clearCache()
+ for id in ufilt(None, {}, [('+','supervisor.supervisor.supervisor'),
+ ('-','supervisor.supervisor'), ('-','supervisor'),
+ ('+','username')]):
+ result.append(id)
+ nodeid = id
+ for x in range(4):
+ assert(('user', nodeid) in self.db.cache)
+ n = self.db.user.getnode(nodeid)
+ for k, v in user_result[nodeid].iteritems():
+ ae((k, n[k]), (k, v))
+ for k in 'creation', 'activity':
+ assert(n[k])
+ nodeid = n.supervisor
+ self.db.clearCache()
+ ae (result, ['8', '9', '10', '6', '7', '1', '3', '2', '4', '5'])
+
+ result = []
+ self.db.clearCache()
+ for id in filt(None, {},
+ [('+','assignedto.supervisor.supervisor.supervisor'),
+ ('+','assignedto.supervisor.supervisor'),
+ ('-','assignedto.supervisor'), ('+','assignedto')]):
+ result.append(id)
+ assert(('issue', id) in self.db.cache)
+ n = self.db.issue.getnode(id)
+ for k, v in issue_result[id].iteritems():
+ ae((k, n[k]), (k, v))
+ for k in 'creation', 'activity':
+ assert(n[k])
+ nodeid = n.assignedto
+ for x in range(4):
+ assert(('user', nodeid) in self.db.cache)
+ n = self.db.user.getnode(nodeid)
+ for k, v in user_result[nodeid].iteritems():
+ ae((k, n[k]), (k, v))
+ for k in 'creation', 'activity':
+ assert(n[k])
+ nodeid = n.supervisor
+ self.db.clearCache()
+ ae (result, ['4', '5', '6', '7', '8', '1', '2', '3'])
+
class ClassicInitTest(unittest.TestCase):
count = 0
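[Editor's note, not part of the patch: the FilterCacheTest added above checks that every node id yielded by filter_iter is already present in db.cache, so subsequent property access needs no extra SQL. A minimal usage sketch of the difference between filter and filter_iter follows; the tracker path is hypothetical and the property names simply mirror the test schema.]

    # Illustrative sketch only, not part of this patch.
    # Assumes a tracker whose 'issue' class has 'status' and 'title'
    # properties, as in the test schema exercised above.
    from roundup import instance

    tracker = instance.open('/path/to/tracker')   # hypothetical tracker home
    db = tracker.open('admin')

    # filter() returns a list of ids; reading a property of each node
    # afterwards costs one additional SQL query per node unless the node
    # is already cached.
    ids = db.issue.filter(None, {'status': '1'}, ('+', 'id'))
    titles = [db.issue.get(i, 'title') for i in ids]

    # filter_iter() yields the same ids one by one, but the SQL query that
    # produced them also pre-seeds the cache with each node's non-multilink
    # properties, so the get() below is answered from the cache.
    titles = [db.issue.get(i, 'title')
              for i in db.issue.filter_iter(None, {'status': '1'}, ('+', 'id'))]

    db.close()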
diff --git a/test/test_mysql.py b/test/test_mysql.py
index 760550bd83bfee4d8b6b7a844d6f9141f62ddcf4..88f014d138851c1e53f52620e8a0be982ae308b1 100644 (file)
--- a/test/test_mysql.py
+++ b/test/test_mysql.py
from roundup.backends import get_backend, have_backend
from db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest
-from db_test_base import ConcurrentDBTest
+from db_test_base import ConcurrentDBTest, FilterCacheTest
class mysqlOpener:
backend = 'mysql'
def setUp(self):
mysqlOpener.setUp(self)
- ClassicInitTest.setUp(self)
+ ConcurrentDBTest.setUp(self)
def tearDown(self):
- ClassicInitTest.tearDown(self)
+ ConcurrentDBTest.tearDown(self)
+ self.nuke_database()
+
+class mysqlFilterCacheTest(mysqlOpener, FilterCacheTest):
+ backend = 'mysql'
+ def setUp(self):
+ mysqlOpener.setUp(self)
+ FilterCacheTest.setUp(self)
+ def tearDown(self):
+ FilterCacheTest.tearDown(self)
self.nuke_database()
from session_common import RDBMSTest
suite.addTest(unittest.makeSuite(mysqlClassicInitTest))
suite.addTest(unittest.makeSuite(mysqlSessionTest))
suite.addTest(unittest.makeSuite(mysqlConcurrencyTest))
+ suite.addTest(unittest.makeSuite(mysqlFilterCacheTest))
return suite
if __name__ == '__main__':
diff --git a/test/test_postgresql.py b/test/test_postgresql.py
index 52e82f7a8420f054e6adc9ada52cc1bd0e999004..eb7104105de465b57ae04e77ac3db8a33ddbffd1 100644 (file)
--- a/test/test_postgresql.py
+++ b/test/test_postgresql.py
from roundup.hyperdb import DatabaseError
from db_test_base import DBTest, ROTest, config, SchemaTest, ClassicInitTest
-from db_test_base import ConcurrentDBTest
+from db_test_base import ConcurrentDBTest, FilterCacheTest
from roundup.backends import get_backend, have_backend
ConcurrentDBTest.tearDown(self)
postgresqlOpener.tearDown(self)
+class postgresqlFilterCacheTest(postgresqlOpener, FilterCacheTest):
+ backend = 'postgresql'
+ def setUp(self):
+ postgresqlOpener.setUp(self)
+ FilterCacheTest.setUp(self)
+
+ def tearDown(self):
+ FilterCacheTest.tearDown(self)
+ postgresqlOpener.tearDown(self)
+
class postgresqlSchemaTest(postgresqlOpener, SchemaTest):
def setUp(self):
postgresqlOpener.setUp(self)
suite.addTest(unittest.makeSuite(postgresqlClassicInitTest))
suite.addTest(unittest.makeSuite(postgresqlSessionTest))
suite.addTest(unittest.makeSuite(postgresqlConcurrencyTest))
+ suite.addTest(unittest.makeSuite(postgresqlFilterCacheTest))
return suite
# vim: set et sts=4 sw=4 :
diff --git a/test/test_sqlite.py b/test/test_sqlite.py
index cec5a9ad037ca35d41829ba21941596046773903..095b1d534e15964d9cd9766cc657f1eed3d02ae3 100644 (file)
--- a/test/test_sqlite.py
+++ b/test/test_sqlite.py
from roundup.backends import get_backend, have_backend
from db_test_base import DBTest, ROTest, SchemaTest, ClassicInitTest, config
-from db_test_base import ConcurrentDBTest
+from db_test_base import ConcurrentDBTest, FilterCacheTest
class sqliteOpener:
if have_backend('sqlite'):
class sqliteConcurrencyTest(ConcurrentDBTest):
backend = 'sqlite'
+class sqliteFilterCacheTest(sqliteOpener, FilterCacheTest):
+ backend = 'sqlite'
+
from session_common import RDBMSTest
class sqliteSessionTest(sqliteOpener, RDBMSTest):
pass
suite.addTest(unittest.makeSuite(sqliteClassicInitTest))
suite.addTest(unittest.makeSuite(sqliteSessionTest))
suite.addTest(unittest.makeSuite(sqliteConcurrencyTest))
+ suite.addTest(unittest.makeSuite(sqliteFilterCacheTest))
return suite
if __name__ == '__main__':
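[Editor's note, not part of the patch: each backend test module above wires FilterCacheTest into its suite the same way, by combining its opener mixin with FilterCacheTest and registering the result with makeSuite. A hedged sketch of that pattern for a hypothetical additional backend follows; 'mybackend' and myBackendOpener are invented names for illustration.]

    # Illustrative sketch only; 'mybackend' and myBackendOpener are invented.
    import unittest
    from db_test_base import FilterCacheTest

    class myBackendOpener:
        backend = 'mybackend'
        def setUp(self):
            pass        # backend-specific database creation would go here
        def tearDown(self):
            pass        # backend-specific cleanup would go here

    class myBackendFilterCacheTest(myBackendOpener, FilterCacheTest):
        backend = 'mybackend'
        def setUp(self):
            myBackendOpener.setUp(self)
            FilterCacheTest.setUp(self)
        def tearDown(self):
            FilterCacheTest.tearDown(self)
            myBackendOpener.tearDown(self)

    def test_suite():
        suite = unittest.TestSuite()
        suite.addTest(unittest.makeSuite(myBackendFilterCacheTest))
        return suite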