1 # $Id: back_gadfly.py,v 1.32 2003-01-12 23:53:20 richard Exp $
''' Gadfly relational database hyperdb backend.
4 About Gadfly
5 ============
7 Gadfly is a collection of python modules that provides relational
8 database functionality entirely implemented in Python. It supports a
9 subset of the intergalactic standard RDBMS Structured Query Language
10 SQL.
13 Additional Instance Requirements
14 ================================
16 The instance configuration must specify where the database is. It does this
17 with GADFLY_DATABASE, which is used as the arguments to the gadfly.gadfly()
18 method:
20 Using an on-disk database directly (not a good idea):
21 GADFLY_DATABASE = (database name, directory)
23 Using a network database (much better idea):
24 GADFLY_DATABASE = (policy, password, address, port)
26 Because multiple accesses directly to a gadfly database aren't handled, but
27 multiple network accesses are, it's strongly advised that the latter setup be
28 used.
30 '''
32 # standard python modules
33 import sys, os, time, re, errno, weakref, copy
35 # roundup modules
36 from roundup import hyperdb, date, password, roundupdb, security
37 from roundup.hyperdb import String, Password, Date, Interval, Link, \
38 Multilink, DatabaseError, Boolean, Number
39 from roundup.backends import locking
# basic RDBMS backend implementation
42 from roundup.backends import rdbms_common
44 # the all-important gadfly :)
45 import gadfly
46 import gadfly.client
47 import gadfly.database
49 class Database(rdbms_common.Database):
50 # char to use for positional arguments
51 arg = '?'
53 def open_connection(self):
54 db = getattr(self.config, 'GADFLY_DATABASE', ('database', self.dir))
56 # lock it
57 lockfilenm = os.path.join(db[1], db[0]) + '.lck'
58 self.lockfile = locking.acquire_lock(lockfilenm)
59 self.lockfile.write(str(os.getpid()))
60 self.lockfile.flush()
62 if len(db) == 2:
63 # ensure files are group readable and writable
64 os.umask(0002)
65 try:
66 self.conn = gadfly.gadfly(*db)
67 except IOError, error:
68 if error.errno != errno.ENOENT:
69 raise
70 self.database_schema = {}
71 self.conn = gadfly.gadfly()
72 self.conn.startup(*db)
73 self.cursor = self.conn.cursor()
74 self.cursor.execute('create table schema (schema varchar)')
75 self.cursor.execute('create table ids (name varchar, num integer)')
76 else:
77 self.cursor = self.conn.cursor()
78 self.cursor.execute('select schema from schema')
79 self.database_schema = self.cursor.fetchone()[0]
80 else:
81 self.conn = gadfly.client.gfclient(*db)
82 self.database_schema = self.load_dbschema()
84 def __repr__(self):
85 return '<roundfly 0x%x>'%id(self)
87 def sql_fetchone(self):
88 ''' Fetch a single row. If there's nothing to fetch, return None.
89 '''
90 try:
91 return self.cursor.fetchone()
92 except gadfly.database.error, message:
93 if message == 'no more results':
94 return None
95 raise
97 def sql_fetchall(self):
98 ''' Fetch a single row. If there's nothing to fetch, return [].
99 '''
100 try:
101 return self.cursor.fetchall()
102 except gadfly.database.error, message:
103 if message == 'no more results':
104 return []
105 raise
107 def save_dbschema(self, schema):
108 ''' Save the schema definition that the database currently implements
109 '''
110 self.sql('insert into schema values (?)', (self.database_schema,))
112 def load_dbschema(self):
113 ''' Load the schema definition that the database currently implements
114 '''
115 self.cursor.execute('select schema from schema')
116 return self.cursor.fetchone()[0]
118 def save_journal(self, classname, cols, nodeid, journaldate,
119 journaltag, action, params):
120 ''' Save the journal entry to the database
121 '''
122 # nothing special to do
123 entry = (nodeid, journaldate, journaltag, action, params)
125 # do the insert
126 a = self.arg
127 sql = 'insert into %s__journal (%s) values (?,?,?,?,?)'%(classname,
128 cols)
129 if __debug__:
130 print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry)
131 self.cursor.execute(sql, entry)
133 def load_journal(self, classname, cols, nodeid):
134 ''' Load the journal from the database
135 '''
136 # now get the journal entries
137 sql = 'select %s from %s__journal where nodeid=%s'%(cols, classname,
138 self.arg)
139 if __debug__:
140 print >>hyperdb.DEBUG, 'getjournal', (self, sql, nodeid)
141 self.cursor.execute(sql, (nodeid,))
142 res = []
143 for nodeid, date_stamp, user, action, params in self.cursor.fetchall():
144 res.append((nodeid, date.Date(date_stamp), user, action, params))
145 return res
class GadflyClass:
    ''' Gadfly-specific overrides mixed into the generic rdbms_common
        Class implementations.
    '''
    def filter(self, search_matches, filterspec, sort=(None,None),
            group=(None,None)):
        ''' Gadfly doesn't have a LIKE predicate :(

            Return the ids of nodes matching "filterspec" (a mapping of
            property name to match value; a list value means "any of
            these").  "search_matches", if not None, restricts the result
            to the node ids it holds as keys.  "sort" and "group" are
            (direction, propname) pairs; a direction of '-' means
            descending order.
        '''
        cn = self.classname

        # figure the WHERE clause from the filterspec
        props = self.getprops()
        frum = ['_'+cn]
        where = []
        args = []
        a = self.db.arg
        for k, v in filterspec.items():
            propclass = props[k]
            if isinstance(propclass, Multilink):
                # join against the separate multilink table
                tn = '%s_%s'%(cn, k)
                frum.append(tn)
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('id=%s.nodeid and %s.linkid in (%s)'%(tn,tn,s))
                    args = args + v
                else:
                    where.append('id=%s.nodeid and %s.linkid = %s'%(tn, tn, a))
                    args.append(v)
            elif isinstance(propclass, Date):
                # dates are stored serialised
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('_%s in (%s)'%(k, s))
                    args = args + [date.Date(x).serialise() for x in v]
                else:
                    where.append('_%s=%s'%(k, a))
                    args.append(date.Date(v).serialise())
            elif isinstance(propclass, Interval):
                # intervals are stored serialised
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('_%s in (%s)'%(k, s))
                    args = args + [date.Interval(x).serialise() for x in v]
                else:
                    where.append('_%s=%s'%(k, a))
                    args.append(date.Interval(v).serialise())
            elif k == 'id':
                # the id column isn't prefixed with an underscore
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('%s in (%s)'%(k, s))
                    args = args + v
                else:
                    where.append('%s=%s'%(k, a))
                    args.append(v)
            else:
                if isinstance(v, type([])):
                    s = ','.join([a for x in v])
                    where.append('_%s in (%s)'%(k, s))
                    args = args + v
                else:
                    where.append('_%s=%s'%(k, a))
                    args.append(v)

        # add results of full text search
        if search_matches is not None:
            v = search_matches.keys()
            # fix: an empty match set used to generate invalid SQL
            # ("id in ()") - the intersection is empty, so short-circuit
            if not v:
                return []
            s = ','.join([a for x in v])
            where.append('id in (%s)'%s)
            args = args + v

        # "grouping" is just the first-order sorting in the SQL fetch
        orderby = []
        ordercols = []
        if group[0] is not None and group[1] is not None:
            if group[0] != '-':
                orderby.append('_'+group[1])
            else:
                orderby.append('_'+group[1]+' desc')
            ordercols.append('_'+group[1])

        # now add in the second-order sorting
        if sort[0] is not None and sort[1] is not None:
            direction, colname = sort
            if colname == 'id':
                # fix: 'id' is always the first select column already, so
                # don't add it to ordercols (used to yield "select id,id")
                if direction != '-':
                    orderby.append(colname)
                else:
                    orderby.append(colname+' desc')
            else:
                if direction != '-':
                    orderby.append('_'+colname)
                else:
                    orderby.append('_'+colname+' desc')
                ordercols.append('_'+colname)

        # construct the SQL - gadfly requires the order-by columns to
        # appear in the select list
        frum = ','.join(frum)
        if where:
            where = ' where ' + (' and '.join(where))
        else:
            where = ''
        cols = ['id']
        if orderby:
            cols = cols + ordercols
            order = ' order by %s'%(','.join(orderby))
        else:
            order = ''
        cols = ','.join(cols)
        sql = 'select %s from %s %s%s'%(cols, frum, where, order)
        args = tuple(args)
        if __debug__:
            print >>hyperdb.DEBUG, 'filter', (self, sql, args)
        self.db.cursor.execute(sql, args)
        l = self.db.cursor.fetchall()

        # return the IDs (the first column of each row)
        return [row[0] for row in l]

    def find(self, **propspec):
        ''' Overload to filter out duplicates in the result
        '''
        d = {}
        for k in rdbms_common.Class.find(self, **propspec):
            d[k] = 1
        return d.keys()
class Class(GadflyClass, rdbms_common.Class):
    ''' The gadfly-backed hyperdb Class. '''
class IssueClass(GadflyClass, rdbms_common.IssueClass):
    ''' The gadfly-backed hyperdb IssueClass. '''
class FileClass(GadflyClass, rdbms_common.FileClass):
    ''' The gadfly-backed hyperdb FileClass. '''